Merge master@5406228 into git_qt-dev-plus-aosp.
am: a0f3a8d97d

Change-Id: I037fa82b522c9c9d80c4162de45ae6aaa2b80fd0
diff --git a/bin/print_skylab_suite_result b/bin/print_skylab_suite_result
new file mode 100755
index 0000000..0ea30bf
--- /dev/null
+++ b/bin/print_skylab_suite_result
@@ -0,0 +1,12 @@
+#!/bin/bash
+# Copyright 2017 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Print skylab suite results (runs skylab_suite.tko_test_views)
+#
+# See http://goto.google.com/monitor_db_per_job_refactor
+
+set -eu
+readonly bin_dir="$(readlink -e -- "$(dirname -- "$0")")"
+exec "${bin_dir}/python_venv" -m skylab_suite.tko_test_views "$@"
diff --git a/client/common_lib/cros/arc.py b/client/common_lib/cros/arc.py
index dc9b3e3..4b726f2 100644
--- a/client/common_lib/cros/arc.py
+++ b/client/common_lib/cros/arc.py
@@ -12,6 +12,7 @@
 import socket
 import sys
 import tempfile
+import time
 
 from autotest_lib.client.bin import test, utils
 from autotest_lib.client.common_lib import error
@@ -34,7 +35,6 @@
 _PROCESS_CHECK_INTERVAL_SECONDS = 1
 _WAIT_FOR_ADB_READY = 60
 _WAIT_FOR_ANDROID_PROCESS_SECONDS = 60
-_WAIT_FOR_DATA_MOUNTED_SECONDS = 60
 _PLAY_STORE_PKG = 'com.android.vending'
 _SETTINGS_PKG = 'com.android.settings'
 
@@ -111,7 +111,7 @@
         return False
 
 
-def _wait_for_data_mounted(timeout=_WAIT_FOR_DATA_MOUNTED_SECONDS):
+def _wait_for_data_mounted(timeout):
     utils.poll_for_condition(
             condition=_is_android_data_mounted,
             desc='Wait for /data mounted',
@@ -127,7 +127,12 @@
     # Although adbd is started at login screen, we still need /data to be
     # mounted to set up key-based authentication. /data should be mounted
     # once the user has logged in.
-    _wait_for_data_mounted()
+    start_time = time.time()
+    _wait_for_data_mounted(timeout)
+    timeout -= (time.time() - start_time)
+    start_time = time.time()
+    arc_common.wait_for_android_boot(timeout)
+    timeout -= (time.time() - start_time)
 
     setup_adb_host()
     if is_adb_connected():
@@ -333,10 +338,9 @@
 def _get_mount_passthrough_pid_internal(job_name):
     """Returns the PID of the mount-passthrough daemon job."""
     job_pid = get_job_pid(job_name)
-    # |job_pid| is the minijail process, obtain the PID of the process running
-    # inside the mount namespace.
-    # FUSE process is the only process running as chronos in the session.
-    return utils.system_output('pgrep -u chronos -s %s' % job_pid)
+    # |job_pid| is the minijail process; the FUSE process should be
+    # the only direct child of the minijail process.
+    return utils.system_output('pgrep -P %s' % job_pid)
 
 
 def get_mount_passthrough_pid_list():
@@ -362,20 +366,6 @@
     return utils.system_output('pgrep -f -u root ^/usr/bin/arc-obb-mounter')
 
 
-def _is_android_booted():
-    """Return whether Android has completed booting."""
-    return adb_shell('getprop sys.boot_completed', ignore_status=True) == '1'
-
-
-def wait_for_boot_completed(timeout=60, sleep=1):
-    """Waits until sys.boot_completed becomes 1."""
-    utils.poll_for_condition(
-            condition=_is_android_booted,
-            desc='Wait for Android boot',
-            timeout=timeout,  # sec
-            sleep_interval=sleep)  # sec
-
-
 def is_android_process_running(process_name):
     """Return whether Android has completed booting.
 
diff --git a/client/common_lib/cros/assistant_util.py b/client/common_lib/cros/assistant_util.py
index 79700f5..8fa0003 100644
--- a/client/common_lib/cros/assistant_util.py
+++ b/client/common_lib/cros/assistant_util.py
@@ -47,3 +47,77 @@
         raise error.TestFail(
                 autotest_ext.EvaluateJavaScript(
                         'window.__assistant_error_msg'))
+
+
+def enable_hotword(autotest_ext):
+    """Enables hotword in Google Assistant.
+
+    @param autotest_ext private autotest extension.
+    @raise error.TestFail if failed to enable hotword feature within time.
+    """
+    try:
+        autotest_ext.ExecuteJavaScript('''
+            window.__assistant_hotword_ready = 0;
+            chrome.autotestPrivate.setWhitelistedPref(
+              'settings.voice_interaction.hotword.enabled', true,
+              function(response) {
+                if (chrome.runtime.lastError) {
+                  window.__assistant_hotword_ready = -1;
+                  window.__assistant_hotword_error_msg =
+                      chrome.runtime.lastError.message;
+                } else {
+                  window.__assistant_hotword_ready = 1;
+                }
+              });
+            ''')
+    except exceptions.EvaluateException as e:
+        raise error.TestFail('Could not enable Hotword "{}".'.format(e))
+
+    ready = utils.poll_for_condition(
+            lambda: autotest_ext.EvaluateJavaScript(
+                    'window.__assistant_hotword_ready'),
+            desc='Wait for the hotword pref change event to return.')
+
+    if ready == -1:
+        raise error.TestFail(
+                autotest_ext.EvaluateJavaScript(
+                        'window.__assistant_hotword_error_msg'))
+
+
+def send_text_query(autotest_ext, text_query):
+    """Sends text query to Assistant and returns response.
+
+    @param autotest_ext private autotest extension.
+    @param text_query text query.
+    @return dictionary containing the information of Assistant query
+            response, mapping from response type to content.
+    """
+    try:
+        autotest_ext.ExecuteJavaScript('''
+            window.__assistant_response_ready = 0;
+            chrome.autotestPrivate.sendAssistantTextQuery('%s', 10 * 1000,
+                function(response) {
+                  if (chrome.runtime.lastError) {
+                    window.__assistant_response_ready = -1;
+                    window.__assistant_error_msg =
+                        chrome.runtime.lastError.message;
+                  } else {
+                    window.__assistant_response_ready = 1;
+                    window.__query_response = response;
+                  }
+                });
+            ''' % text_query)
+    except exceptions.EvaluateException as e:
+        raise error.TestFail('Could not get Assistant response "%s".' % e)
+
+    is_ready = utils.poll_for_condition(
+            lambda: autotest_ext.EvaluateJavaScript(
+                    'window.__assistant_response_ready'),
+            desc='Waiting for Assistant response.')
+
+    if is_ready == -1:
+        raise error.TestFail(
+                autotest_ext.EvaluateJavaScript(
+                        'window.__assistant_error_msg'))
+
+    return autotest_ext.EvaluateJavaScript('window.__query_response')
diff --git a/client/common_lib/cros/g2f_utils.py b/client/common_lib/cros/g2f_utils.py
index c1f9cd9..4af10cd 100644
--- a/client/common_lib/cros/g2f_utils.py
+++ b/client/common_lib/cros/g2f_utils.py
@@ -2,11 +2,8 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-import logging, os
 import time
 
-from autotest_lib.client.common_lib import error, utils, logging_manager
-
 def StartU2fd(client):
     """Starts u2fd on the client.
 
@@ -29,17 +26,18 @@
 
     return cr50_dev.pop()
 
-def G2fRegister(client, dev, challenge, application):
+def G2fRegister(client, dev, challenge, application, p1=0):
     """Returns a dictionary with TPM status.
 
     @param client: client object to run commands on.
     """
     return client.run('g2ftool --reg --dev=' + dev +
                       ' --challenge=' + challenge +
-                      ' --application=' + application,
+                      ' --application=' + application +
+                      ' --p1=' + str(p1),
                       ignore_status=True)
 
-def G2fAuth(client, dev, challenge, application, key_handle):
+def G2fAuth(client, dev, challenge, application, key_handle, p1=0):
     """Returns a dictionary with TPM status.
 
     @param client: client object to run commands on.
@@ -47,5 +45,6 @@
     return client.run('g2ftool --auth --dev=' + dev +
                       ' --challenge=' + challenge +
                       ' --application=' + application +
-                      ' --key_handle=' + key_handle,
+                      ' --key_handle=' + key_handle +
+                      ' --p1=' + str(p1),
                       ignore_status=True)
diff --git a/client/common_lib/cros/system_metrics_collector.py b/client/common_lib/cros/system_metrics_collector.py
index 7fd6cc7..25005fd 100644
--- a/client/common_lib/cros/system_metrics_collector.py
+++ b/client/common_lib/cros/system_metrics_collector.py
@@ -12,10 +12,43 @@
         @param higher_is_better: Whether a higher value is considered better or
                 not.
         """
-        self.values = []
-        self.description = description
-        self.units = units
-        self.higher_is_better = higher_is_better
+        self._description = description
+        self._units = units
+        self._higher_is_better = higher_is_better
+        self._samples = []
+
+    @property
+    def description(self):
+        """Description of the metric."""
+        return self._description
+
+    @property
+    def units(self):
+        """Units of the metric."""
+        return self._units
+
+    @property
+    def higher_is_better(self):
+        """Whether a higher value is considered better or not."""
+        return self._higher_is_better
+
+    @property
+    def values(self):
+        """Measured values of the metric."""
+        if len(self._samples) == 0:
+            return self._samples
+        return self._aggregate(self._samples)
+
+    @values.setter
+    def values(self, samples):
+        self._samples = samples
+
+    def _aggregate(self, samples):
+        """
+        Subclasses can override this to aggregate the metric into a single
+        sample.
+        """
+        return samples
 
     def pre_collect(self):
         """
@@ -23,32 +56,53 @@
         """
         pass
 
+    def _store_sample(self, sample):
+        self._samples.append(sample)
+
     def collect_metric(self):
         """
-        Collects one metric.
+        Collects one sample.
 
-        Implementations should add a metric value to the self.values list.
+        Implementations should call self._store_sample() once if it's not an
+        aggregate, i.e., it overrides self._aggregate().
         """
-        raise NotImplementedError('Subclasses should override')
+        pass
+
+    @classmethod
+    def from_metric(cls, other):
+        """
+        Instantiate from an existing metric instance.
+        """
+        metric = cls(
+                description=other.description,
+                units=other.units,
+                higher_is_better=other.higher_is_better)
+        metric.values = other.values
+        return metric
 
 class PeakMetric(Metric):
     """
     Metric that collects the peak of another metric.
     """
-    def __init__(self, metric):
-        """
-        Initializes with a Metric.
 
-        @param metric The Metric to get the peak from.
-        """
-        super(PeakMetric, self).__init__(
-                'peak_' + metric.description,
-                units = metric.units,
-                higher_is_better = metric.higher_is_better)
-        self.metric = metric
+    @property
+    def description(self):
+        return 'peak_' + super(PeakMetric, self).description
 
-    def collect_metric(self):
-        self.values = [max(self.metric.values)] if self.metric.values else []
+    def _aggregate(self, samples):
+        return max(samples)
+
+class SumMetric(Metric):
+    """
+    Metric that sums another metric.
+    """
+
+    @property
+    def description(self):
+        return 'sum_' + super(SumMetric, self).description
+
+    def _aggregate(self, samples):
+        return sum(samples)
 
 class MemUsageMetric(Metric):
     """
@@ -66,7 +120,7 @@
         free_memory = self.system_facade.get_mem_free_plus_buffers_and_cached()
         used_memory = total_memory - free_memory
         usage_percent = (used_memory * 100) / total_memory
-        self.values.append(usage_percent)
+        self._store_sample(usage_percent)
 
 class CpuUsageMetric(Metric):
     """
@@ -89,7 +143,7 @@
         # current_usage.
         usage_percent = 100 * self.system_facade.compute_active_cpu_time(
                 self.last_usage, current_usage)
-        self.values.append(usage_percent)
+        self._store_sample(usage_percent)
         self.last_usage = current_usage
 
 class AllocatedFileHandlesMetric(Metric):
@@ -102,7 +156,7 @@
         self.system_facade = system_facade
 
     def collect_metric(self):
-        self.values.append(self.system_facade.get_num_allocated_file_handles())
+        self._store_sample(self.system_facade.get_num_allocated_file_handles())
 
 class StorageWrittenMetric(Metric):
     """
@@ -125,7 +179,7 @@
         statistics = self.system_facade.get_storage_statistics()
         written_kb = statistics['written_kb']
         written_period = written_kb - self.last_written_kb
-        self.values.append(written_period)
+        self._store_sample(written_period)
         self.last_written_kb = written_kb
 
 class TemperatureMetric(Metric):
@@ -137,7 +191,7 @@
         self.system_facade = system_facade
 
     def collect_metric(self):
-        self.values.append(self.system_facade.get_current_temperature_max())
+        self._store_sample(self.system_facade.get_current_temperature_max())
 
 def create_default_metric_set(system_facade):
     """
@@ -151,9 +205,10 @@
     file_handles = AllocatedFileHandlesMetric(system_facade)
     storage_written = StorageWrittenMetric(system_facade)
     temperature = TemperatureMetric(system_facade)
-    peak_cpu = PeakMetric(cpu)
-    peak_mem = PeakMetric(mem)
-    peak_temperature = PeakMetric(temperature)
+    peak_cpu = PeakMetric.from_metric(cpu)
+    peak_mem = PeakMetric.from_metric(mem)
+    peak_temperature = PeakMetric.from_metric(temperature)
+    sum_storage_written = SumMetric.from_metric(storage_written)
     return [cpu,
             mem,
             file_handles,
@@ -161,7 +216,8 @@
             temperature,
             peak_cpu,
             peak_mem,
-            peak_temperature]
+            peak_temperature,
+            sum_storage_written]
 
 class SystemMetricsCollector(object):
     """
diff --git a/client/common_lib/cros/system_metrics_collector_unittest.py b/client/common_lib/cros/system_metrics_collector_unittest.py
index 75f031e..b3d8260 100644
--- a/client/common_lib/cros/system_metrics_collector_unittest.py
+++ b/client/common_lib/cros/system_metrics_collector_unittest.py
@@ -62,28 +62,99 @@
         collector.collect_snapshot()
         collector.write_metrics(lambda **kwargs: None)
 
-    def test_peak_metric_description(self):
-        test_metric = TestMetric()
-        peak_metric = system_metrics_collector.PeakMetric(test_metric)
-        self.assertEqual(peak_metric.description, 'peak_test_description')
+    def test_aggregate_metric_zero_samples(self):
+        metric = TestAggregateMetric()
+        self.assertEqual(metric.values, [])
 
-    def test_peak_metric_one_element(self):
-        test_metric = TestMetric()
-        peak_metric = system_metrics_collector.PeakMetric(test_metric)
-        test_metric.collect_metric()
-        peak_metric.collect_metric()
-        self.assertEqual(peak_metric.values, [1])
+    def test_aggregate_metric_one_sample(self):
+        metric = TestAggregateMetric()
+        metric.collect_metric()
+        self.assertEqual(metric.values, 1)
 
-    def test_peak_metric_many_elements(self):
+    def test_aggregate_metric_many_samples(self):
+        metric = TestAggregateMetric()
+        metric.collect_metric()
+        metric.value = 2
+        metric.collect_metric()
+        metric.value = 3
+        metric.collect_metric()
+        self.assertEqual(metric.values, 3)
+
+    def test_aggregate_metric_from_metric_one_sample(self):
         test_metric = TestMetric()
-        peak_metric = system_metrics_collector.PeakMetric(test_metric)
+        aggregate_metric = LastElementMetric.from_metric(test_metric)
         test_metric.collect_metric()
+        aggregate_metric.collect_metric()
+        self.assertEqual(test_metric.values, [1])
+        self.assertEqual(aggregate_metric.values, 1)
+
+    def test_aggregate_metric_from_metric_many_samples(self):
+        test_metric = TestMetric()
+        aggregate_metric = LastElementMetric.from_metric(test_metric)
+        test_metric.collect_metric()
+        aggregate_metric.collect_metric()
         test_metric.value = 2
         test_metric.collect_metric()
+        aggregate_metric.collect_metric()
+        test_metric.value = 3
+        test_metric.collect_metric()
+        aggregate_metric.collect_metric()
+        self.assertEqual(test_metric.values, [1, 2, 3])
+        self.assertEqual(aggregate_metric.values, 3)
+
+    def test_peak_metric_description(self):
+        metric = system_metrics_collector.PeakMetric('foo')
+        self.assertEqual(metric.description, 'peak_foo')
+
+    def test_peak_metric_many_samples(self):
+        metric = TestPeakMetric()
+        metric.collect_metric()
+        metric.value = 2
+        metric.collect_metric()
+        metric.value = 0
+        metric.collect_metric()
+        self.assertEqual(metric.values, 2)
+
+    def test_peak_metric_from_metric_many_samples(self):
+        test_metric = TestMetric()
+        peak_metric = system_metrics_collector.PeakMetric.from_metric(
+                test_metric)
+        test_metric.collect_metric()
+        peak_metric.collect_metric()
+        test_metric.value = 2
+        test_metric.collect_metric()
+        peak_metric.collect_metric()
         test_metric.value = 0
         test_metric.collect_metric()
         peak_metric.collect_metric()
-        self.assertEqual(peak_metric.values, [2])
+        self.assertEqual(peak_metric.values, 2)
+
+    def test_sum_metric_description(self):
+        metric = system_metrics_collector.SumMetric('foo')
+        self.assertEqual(metric.description, 'sum_foo')
+
+    def test_sum_metric_many_samples(self):
+        metric = TestSumMetric()
+        metric.collect_metric()
+        metric.value = 2
+        metric.collect_metric()
+        metric.value = 3
+        metric.collect_metric()
+        self.assertEqual(metric.values, 6)
+
+    def test_sum_metric_from_metric_many_samples(self):
+        test_metric = TestMetric()
+        sum_metric = system_metrics_collector.SumMetric.from_metric(
+                test_metric)
+        test_metric.collect_metric()
+        sum_metric.collect_metric()
+        test_metric.value = 40
+        test_metric.collect_metric()
+        sum_metric.collect_metric()
+        test_metric.value = 1
+        test_metric.collect_metric()
+        sum_metric.collect_metric()
+        self.assertEqual(sum_metric.values, 42)
 
 class FakeSystemFacade(object):
     def __init__(self):
@@ -128,6 +199,17 @@
         self.value = 1
 
     def collect_metric(self):
-        self.values.append(self.value)
+        self._store_sample(self.value)
 
+class LastElementMetric(system_metrics_collector.Metric):
+    def _aggregate(self, x):
+        return x[-1]
 
+class TestAggregateMetric(TestMetric, LastElementMetric):
+    pass
+
+class TestPeakMetric(TestMetric, system_metrics_collector.PeakMetric):
+    pass
+
+class TestSumMetric(TestMetric, system_metrics_collector.SumMetric):
+    pass
diff --git a/client/cros/enterprise/enterprise_policy_base.py b/client/cros/enterprise/enterprise_policy_base.py
index c83f8e9..3d63dd2 100755
--- a/client/cros/enterprise/enterprise_policy_base.py
+++ b/client/cros/enterprise/enterprise_policy_base.py
@@ -251,6 +251,7 @@
                    arc_mode=False,
                    setup_arc=True,
                    use_clouddpc_test=None,
+                   disable_default_apps=True,
                    extension_paths=[],
                    extra_chrome_flags=[]):
         """Set up DMS, log in, and verify policy values.
@@ -304,6 +305,7 @@
                             init_network_controller=init_network_controller,
                             extension_paths=extension_paths,
                             arc_mode=arc_mode,
+                            disable_default_apps=disable_default_apps,
                             extra_chrome_flags=extra_chrome_flags)
 
         # Skip policy check upon request or if we enroll but don't log in.
@@ -895,6 +897,7 @@
                        auto_login=True,
                        arc_mode=False,
                        init_network_controller=False,
+                       disable_default_apps=True,
                        extension_paths=[],
                        extra_chrome_flags=[]):
         """
@@ -967,7 +970,8 @@
                         autotest_ext=True,
                         init_network_controller=init_network_controller,
                         expect_policy_fetch=True,
-                        extension_paths=extension_paths)
+                        extension_paths=extension_paths,
+                        disable_default_apps=disable_default_apps)
         else:
             self.cr = chrome.Chrome(
                     auto_login=False,
diff --git a/client/cros/scripts/wifi b/client/cros/scripts/wifi
index 4069dbe..1b703f9 100755
--- a/client/cros/scripts/wifi
+++ b/client/cros/scripts/wifi
@@ -51,7 +51,7 @@
 
 
 def configure(ssid, security, passphrase):
-    wifi = wifi_proxy.WifiProxy()
+    wifi = wifi_proxy.WifiProxy.get_proxy()
     security_parameters = {}
     if passphrase is not None:
         security_parameters[wifi.SERVICE_PROPERTY_PASSPHRASE] = passphrase
@@ -78,7 +78,7 @@
     @return True upon success, False otherwise.
 
     """
-    wifi = wifi_proxy.WifiProxy()
+    wifi = wifi_proxy.WifiProxy.get_proxy()
     result = wifi.connect_to_wifi_network(ssid,
             security,
             credentials,
@@ -109,7 +109,7 @@
     @return True upon seeing network is in idle state.
 
     """
-    wifi = wifi_proxy.WifiProxy()
+    wifi = wifi_proxy.WifiProxy.get_proxy()
     result = wifi.disconnect_from_wifi_network(ssid, timeout)
     (successful, duration, reason) = result
     if successful:
diff --git a/client/site_tests/cellular_OutOfCreditsSubscriptionState/cellular_OutOfCreditsSubscriptionState.py b/client/site_tests/cellular_OutOfCreditsSubscriptionState/cellular_OutOfCreditsSubscriptionState.py
index 00174ca..8cdb7e8 100644
--- a/client/site_tests/cellular_OutOfCreditsSubscriptionState/cellular_OutOfCreditsSubscriptionState.py
+++ b/client/site_tests/cellular_OutOfCreditsSubscriptionState/cellular_OutOfCreditsSubscriptionState.py
@@ -110,9 +110,6 @@
             self.pseudomm = pm_proxy.PseudoMMProxy.get_proxy()
             self.modem = self.pseudomm.get_modem()
 
-            tests = [self._test_provisioned,
-                     self._test_out_of_credits_at_start,
-                     self._test_out_of_credits_while_connected]
-
-            for test in tests:
-                test()
+            self._test_provisioned()
+            self._test_out_of_credits_at_start()
+            self._test_out_of_credits_while_connected()
diff --git a/client/site_tests/enterprise_KioskEnrollment/enterprise_KioskEnrollment.py b/client/site_tests/enterprise_KioskEnrollment/enterprise_KioskEnrollment.py
index d4b534d..6644fef 100644
--- a/client/site_tests/enterprise_KioskEnrollment/enterprise_KioskEnrollment.py
+++ b/client/site_tests/enterprise_KioskEnrollment/enterprise_KioskEnrollment.py
@@ -4,12 +4,16 @@
 
 import logging
 import os
+import time
 
 from autotest_lib.client.bin import test, utils
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.common_lib.cros import chrome
 from autotest_lib.client.common_lib.cros import enrollment
 from autotest_lib.client.common_lib.cros import kiosk_utils
+from autotest_lib.client.common_lib import utils as utils2
+
+KIOSK_MODE = 'Starting kiosk mode...'
 
 
 class enterprise_KioskEnrollment(test.test):
@@ -49,4 +53,10 @@
         with chrome.Chrome(auto_login=False,
                            disable_gaia_services=False) as cr:
             enrollment.EnterpriseEnrollment(cr.browser, user_id, password)
-            self._CheckKioskExtensionContexts(cr.browser)
+        # This way of checking a kiosk extension doesn't work.
+        #self._CheckKioskExtensionContexts(cr.browser)
+        time.sleep(15)
+        running_apps = utils2.system_output('cat /var/log/messages | grep kiosk')
+        if KIOSK_MODE not in running_apps:
+            raise error.TestFail(
+                'DUT did not enter kiosk mode, but it should have.')
diff --git a/client/site_tests/graphics_Gbm/src/gbmtest.c b/client/site_tests/graphics_Gbm/src/gbmtest.c
index 4107bc5..e167032 100644
--- a/client/site_tests/graphics_Gbm/src/gbmtest.c
+++ b/client/site_tests/graphics_Gbm/src/gbmtest.c
@@ -103,6 +103,7 @@
 struct plane_info {
 	uint32_t bits_per_pixel;
 	uint32_t subsample_rate;
+	uint32_t data_mask;
 };
 
 #define MAX_PLANES 3
@@ -114,52 +115,52 @@
 
 /* Bits per pixel for each. */
 static const struct format_info format_info_list[] = {
-	{GBM_FORMAT_C8, 1, {{8, 1}}},
-	{GBM_FORMAT_RGB332, 1, {{8, 1}}},
-	{GBM_FORMAT_BGR233, 1, {{8, 1}}},
-	{GBM_FORMAT_XRGB4444, 1, {{16, 1}}},
-	{GBM_FORMAT_XBGR4444, 1, {{16, 1}}},
-	{GBM_FORMAT_RGBX4444, 1, {{16, 1}}},
-	{GBM_FORMAT_BGRX4444, 1, {{16, 1}}},
-	{GBM_FORMAT_ARGB4444, 1, {{16, 1}}},
-	{GBM_FORMAT_ABGR4444, 1, {{16, 1}}},
-	{GBM_FORMAT_RGBA4444, 1, {{16, 1}}},
-	{GBM_FORMAT_BGRA4444, 1, {{16, 1}}},
-	{GBM_FORMAT_XRGB1555, 1, {{16, 1}}},
-	{GBM_FORMAT_XBGR1555, 1, {{16, 1}}},
-	{GBM_FORMAT_RGBX5551, 1, {{16, 1}}},
-	{GBM_FORMAT_BGRX5551, 1, {{16, 1}}},
-	{GBM_FORMAT_ARGB1555, 1, {{16, 1}}},
-	{GBM_FORMAT_ABGR1555, 1, {{16, 1}}},
-	{GBM_FORMAT_RGBA5551, 1, {{16, 1}}},
-	{GBM_FORMAT_BGRA5551, 1, {{16, 1}}},
-	{GBM_FORMAT_RGB565, 1, {{16, 1}}},
-	{GBM_FORMAT_BGR565, 1, {{16, 1}}},
-	{GBM_FORMAT_RGB888, 1, {{24, 1}}},
-	{GBM_FORMAT_BGR888, 1, {{24, 1}}},
-	{GBM_FORMAT_XRGB8888, 1, {{32, 1}}},
-	{GBM_FORMAT_XBGR8888, 1, {{32, 1}}},
-	{GBM_FORMAT_RGBX8888, 1, {{32, 1}}},
-	{GBM_FORMAT_BGRX8888, 1, {{32, 1}}},
-	{GBM_FORMAT_ARGB8888, 1, {{32, 1}}},
-	{GBM_FORMAT_ABGR8888, 1, {{32, 1}}},
-	{GBM_FORMAT_RGBA8888, 1, {{32, 1}}},
-	{GBM_FORMAT_BGRA8888, 1, {{32, 1}}},
-	{GBM_FORMAT_XRGB2101010, 1, {{32, 1}}},
-	{GBM_FORMAT_XBGR2101010, 1, {{32, 1}}},
-	{GBM_FORMAT_RGBX1010102, 1, {{32, 1}}},
-	{GBM_FORMAT_BGRX1010102, 1, {{32, 1}}},
-	{GBM_FORMAT_ARGB2101010, 1, {{32, 1}}},
-	{GBM_FORMAT_ABGR2101010, 1, {{32, 1}}},
-	{GBM_FORMAT_RGBA1010102, 1, {{32, 1}}},
-	{GBM_FORMAT_BGRA1010102, 1, {{32, 1}}},
-	{GBM_FORMAT_YUYV, 1, {{16, 1}}},
-	{GBM_FORMAT_YVYU, 1, {{16, 1}}},
-	{GBM_FORMAT_UYVY, 1, {{16, 1}}},
-	{GBM_FORMAT_VYUY, 1, {{16, 1}}},
-	{GBM_FORMAT_AYUV, 1, {{32, 1}}},
-	{GBM_FORMAT_NV12, 2, {{8, 1}, {16, 2}}},
-	{GBM_FORMAT_YVU420, 3, {{8, 1}, {8, 2}, {8,2}}},
+	{GBM_FORMAT_C8, 1, {{8, 1, 0xFF}}},
+	{GBM_FORMAT_RGB332, 1, {{8, 1, 0xFF}}},
+	{GBM_FORMAT_BGR233, 1, {{8, 1, 0xFF}}},
+	{GBM_FORMAT_XRGB4444, 1, {{16, 1, 0x0FFF}}},
+	{GBM_FORMAT_XBGR4444, 1, {{16, 1, 0x0FFF}}},
+	{GBM_FORMAT_RGBX4444, 1, {{16, 1, 0xFFF0}}},
+	{GBM_FORMAT_BGRX4444, 1, {{16, 1, 0xFFF0}}},
+	{GBM_FORMAT_ARGB4444, 1, {{16, 1, 0xFFFF}}},
+	{GBM_FORMAT_ABGR4444, 1, {{16, 1, 0xFFFF}}},
+	{GBM_FORMAT_RGBA4444, 1, {{16, 1, 0xFFFF}}},
+	{GBM_FORMAT_BGRA4444, 1, {{16, 1, 0xFFFF}}},
+	{GBM_FORMAT_XRGB1555, 1, {{16, 1, 0x7FFF}}},
+	{GBM_FORMAT_XBGR1555, 1, {{16, 1, 0x7FFF}}},
+	{GBM_FORMAT_RGBX5551, 1, {{16, 1, 0xFFFE}}},
+	{GBM_FORMAT_BGRX5551, 1, {{16, 1, 0xFFFE}}},
+	{GBM_FORMAT_ARGB1555, 1, {{16, 1, 0xFFFF}}},
+	{GBM_FORMAT_ABGR1555, 1, {{16, 1, 0xFFFF}}},
+	{GBM_FORMAT_RGBA5551, 1, {{16, 1, 0xFFFF}}},
+	{GBM_FORMAT_BGRA5551, 1, {{16, 1, 0xFFFF}}},
+	{GBM_FORMAT_RGB565, 1, {{16, 1, 0xFFFF}}},
+	{GBM_FORMAT_BGR565, 1, {{16, 1, 0xFFFF}}},
+	{GBM_FORMAT_RGB888, 1, {{24, 1, 0xFFFFFF}}},
+	{GBM_FORMAT_BGR888, 1, {{24, 1, 0xFFFFFF}}},
+	{GBM_FORMAT_XRGB8888, 1, {{32, 1, 0x00FFFFFF}}},
+	{GBM_FORMAT_XBGR8888, 1, {{32, 1, 0x00FFFFFF}}},
+	{GBM_FORMAT_RGBX8888, 1, {{32, 1, 0xFFFFFF00}}},
+	{GBM_FORMAT_BGRX8888, 1, {{32, 1, 0xFFFFFF00}}},
+	{GBM_FORMAT_ARGB8888, 1, {{32, 1, 0xFFFFFFFF}}},
+	{GBM_FORMAT_ABGR8888, 1, {{32, 1, 0xFFFFFFFF}}},
+	{GBM_FORMAT_RGBA8888, 1, {{32, 1, 0xFFFFFFFF}}},
+	{GBM_FORMAT_BGRA8888, 1, {{32, 1, 0xFFFFFFFF}}},
+	{GBM_FORMAT_XRGB2101010, 1, {{32, 1, 0x3FFFFFFF}}},
+	{GBM_FORMAT_XBGR2101010, 1, {{32, 1, 0x3FFFFFFF}}},
+	{GBM_FORMAT_RGBX1010102, 1, {{32, 1, 0xFFFFFFFC}}},
+	{GBM_FORMAT_BGRX1010102, 1, {{32, 1, 0xFFFFFFFC}}},
+	{GBM_FORMAT_ARGB2101010, 1, {{32, 1, 0xFFFFFFFF}}},
+	{GBM_FORMAT_ABGR2101010, 1, {{32, 1, 0xFFFFFFFF}}},
+	{GBM_FORMAT_RGBA1010102, 1, {{32, 1, 0xFFFFFFFF}}},
+	{GBM_FORMAT_BGRA1010102, 1, {{32, 1, 0xFFFFFFFF}}},
+	{GBM_FORMAT_YUYV, 1, {{16, 1, 0xFFFF}}},
+	{GBM_FORMAT_YVYU, 1, {{16, 1, 0xFFFF}}},
+	{GBM_FORMAT_UYVY, 1, {{16, 1, 0xFFFF}}},
+	{GBM_FORMAT_VYUY, 1, {{16, 1, 0xFFFF}}},
+	{GBM_FORMAT_AYUV, 1, {{32, 1, 0xFFFFFFFF}}},
+	{GBM_FORMAT_NV12, 2, {{8, 1, 0xFF}, {16, 2, 0xFFFF}}},
+	{GBM_FORMAT_YVU420, 3, {{8, 1, 0xFF}, {8, 2, 0xFF}, {8,2, 0xFF}}},
 };
 
 static const uint32_t usage_list[] = {
@@ -920,7 +921,8 @@
 	uint8_t *pixel;
 	struct gbm_bo *bo;
 	void *map_data, *addr;
-	uint32_t x, y, p, w, h, b, planes, bytes_per_pixel, idx;
+	uint32_t x, y, p, w, h, b, planes, bytes_per_pixel, pixel_data_mask, idx;
+	uint8_t byte_mask;
 	uint32_t stride = 0;
 	const int width = 333;
 	const int height = 444;
@@ -972,11 +974,14 @@
 
 		pixel = (uint8_t *)addr;
 		bytes_per_pixel = format_info_list[format_index].planes[p].bits_per_pixel / 8;
+		pixel_data_mask = format_info_list[format_index].planes[p].data_mask;
 		for (y = 0; y < h; ++y) {
 			for (x = 0; x < w; ++x) {
 				idx = y * stride + x * bytes_per_pixel;
-				for (b = 0; b < bytes_per_pixel; ++b)
-					CHECK(pixel[idx + b] == (uint8_t)(y ^ x ^ b));
+				for (b = 0; b < bytes_per_pixel; ++b) {
+					byte_mask = pixel_data_mask >> (8 * b);
+					CHECK((pixel[idx + b] & byte_mask) == ((uint8_t)(y ^ x ^ b) & byte_mask));
+				}
 			}
 		}
 		gbm_bo_unmap(bo, map_data);
diff --git a/client/site_tests/graphics_Sanity/control b/client/site_tests/graphics_Sanity/control
index ccb2b87..67ee566 100644
--- a/client/site_tests/graphics_Sanity/control
+++ b/client/site_tests/graphics_Sanity/control
@@ -8,7 +8,8 @@
 CRITERIA = """
 This test fails if application screen shots cannot capture the screen output.
 """
-ATTRIBUTES = ("suite:bvt-cq, suite:graphics, suite:graphics_per-day,"
+# TODO(pwang): crbug.com/948506, change suite:bvt-perbuild back to bvt-cq.
+ATTRIBUTES = ("suite:bvt-perbuild, suite:graphics, suite:graphics_per-day,"
               "suite:graphics_system")
 TIME='SHORT'
 TEST_CATEGORY = 'Functional'
diff --git a/client/site_tests/graphics_dEQP/graphics_dEQP.py b/client/site_tests/graphics_dEQP/graphics_dEQP.py
index 406cd49..d162df9 100644
--- a/client/site_tests/graphics_dEQP/graphics_dEQP.py
+++ b/client/site_tests/graphics_dEQP/graphics_dEQP.py
@@ -21,8 +21,7 @@
 
 
 class graphics_dEQP(graphics_utils.GraphicsTest):
-    """Run the drawElements Quality Program test suite.
-    """
+    """Run the drawElements Quality Program test suite."""
     version = 1
     _services = None
     _hasty = False
@@ -69,7 +68,8 @@
         self._env = os.environ.copy()
         old_ld_path = self._env.get('LD_LIBRARY_PATH', '')
         if old_ld_path:
-            self._env['LD_LIBRARY_PATH'] = '/usr/local/lib:/usr/local/lib64:' + old_ld_path
+            self._env[
+                'LD_LIBRARY_PATH'] = '/usr/local/lib:/usr/local/lib64:' + old_ld_path
         else:
             self._env['LD_LIBRARY_PATH'] = '/usr/local/lib:/usr/local/lib64'
 
@@ -83,8 +83,19 @@
             self._services.restore_services()
         super(graphics_dEQP, self).cleanup()
 
-    def _parse_test_results(self, result_filename,
-                            test_results=None, failing_test=None):
+    def _archive_test_results(self, result_filename):
+        """Reduce space usage.
+
+        The default /tmp result file location is memory backed and capped at 1/2
+        of main memory. We have experienced out of storage situations. Avoid
+        this for instance by using compression.
+        """
+        os.system('gzip %s' % result_filename)
+
+    def _parse_test_results(self,
+                            result_filename,
+                            test_results=None,
+                            failing_test=None):
         """Handles result files with one or more test results.
 
         @param result_filename: log file to parse.
@@ -103,7 +114,7 @@
             test_results = {}
 
         if not os.path.isfile(result_filename):
-            failing_test.append(test_case)
+            logging.error('Did not find file %s', result_filename)
             return test_results
 
         with open(result_filename) as result_file:
@@ -137,7 +148,7 @@
                         xml_complete = False
                     test_results[result] = test_results.get(result, 0) + 1
                     if (result.lower() not in self.TEST_RESULT_FILTER and
-                        failing_test != None):
+                            failing_test != None):
                         failing_test.append(test_case)
                     xml_bad = False
                     xml_start = False
@@ -156,7 +167,7 @@
         for subset_file in subset_paths:
             # Filter against extra hasty failures only in hasty mode.
             if (not '.Pass.bz2' in subset_file and
-               (self._hasty or '.hasty.' not in subset_file)):
+                (self._hasty or '.hasty.' not in subset_file)):
                 not_passing_cases.extend(
                     bz2.BZ2File(subset_file).read().splitlines())
         not_passing_cases.sort()
@@ -220,22 +231,23 @@
         command = ('%s '
                    '--deqp-runmode=txt-caselist '
                    '--deqp-surface-type=%s '
-                   '--deqp-gl-config-name=rgba8888d24s8ms0 ' % (executable,
-                                                                self._surface))
+                   '--deqp-gl-config-name=rgba8888d24s8ms0 ' %
+                   (executable, self._surface))
         logging.info('Running command %s', command)
-        utils.run(command,
-                  env=self._env,
-                  timeout=60,
-                  stderr_is_expected=False,
-                  ignore_status=False,
-                  stdin=None)
+        utils.run(
+            command,
+            env=self._env,
+            timeout=60,
+            stderr_is_expected=False,
+            ignore_status=False,
+            stdin=None)
 
         # Now read this caselist file.
         caselist_name = '%s-cases.txt' % test_filter.split('.')[0]
         caselist_file = os.path.join(os.path.dirname(executable), caselist_name)
         if not os.path.isfile(caselist_file):
-            raise error.TestFail('Failed: No caselist file at %s!' %
-                                 caselist_file)
+            raise error.TestFail(
+                'Failed: No caselist file at %s!' % caselist_file)
 
         # And remove non-Pass'ing expectations from caselist.
         caselist = open(caselist_file).read().splitlines()
@@ -250,7 +262,8 @@
 
         test_cases = list(set(test_cases) - set(not_passing_cases))
         if not test_cases:
-            raise error.TestFail('Failed: Unable to bootstrap %s!' % test_filter)
+            raise error.TestFail(
+                'Failed: Unable to bootstrap %s!' % test_filter)
 
         test_cases.sort()
         return test_cases
@@ -263,7 +276,9 @@
         return []
 
     def _get_test_cases(self, test_filter, subset):
-        """Gets the test cases for 'Pass', 'Fail' etc. expectations.
+        """Gets the test cases for 'Pass', 'Fail' etc.
+
+        expectations.
 
         This function supports bootstrapping of new GPU families and dEQP
         binaries. In particular if there are not 'Pass' expectations found for
@@ -286,8 +301,8 @@
                 # for trouble (stability). Decide if it should be disallowed.
                 return self._load_not_passing_cases(test_filter)
             if subset != 'Pass':
-                raise error.TestFail('Failed: No subset file found for %s!' %
-                                     subset_path)
+                raise error.TestFail(
+                    'Failed: No subset file found for %s!' % subset_path)
             # Ask dEQP for all cases and remove the failing ones.
             return self._bootstrap_new_test_cases(test_filter)
 
@@ -334,14 +349,9 @@
                            '--deqp-watchdog=enable '
                            '--deqp-surface-width=%d '
                            '--deqp-surface-height=%d '
-                           '--deqp-log-filename=%s' % (
-                               executable,
-                               test_case,
-                               self._surface,
-                               width,
-                               height,
-                               log_file)
-                           )
+                           '--deqp-log-filename=%s' %
+                           (executable, test_case, self._surface, width, height,
+                            log_file))
                 logging.debug('Running single: %s', command)
 
                 # Must be in the executable directory when running for it to find it's
@@ -353,14 +363,15 @@
                 run_result = {}
                 start_time = time.time()
                 try:
-                    run_result = utils.run(command,
-                                           env=self._env,
-                                           timeout=self._timeout,
-                                           stderr_is_expected=False,
-                                           ignore_status=True)
+                    run_result = utils.run(
+                        command,
+                        env=self._env,
+                        timeout=self._timeout,
+                        stderr_is_expected=False,
+                        ignore_status=True)
                     result_counts = self._parse_test_results(
-                        log_file,
-                        failing_test=failing_test)
+                        log_file, failing_test=failing_test)
+                    self._archive_test_results(log_file)
                     if result_counts:
                         result = result_counts.keys()[0]
                     else:
@@ -430,11 +441,11 @@
         # in smaller batches. We start and end at multiples of batch_size
         # boundaries.
         shard_start = self._hasty_batch_size * (
-            (self._shard_number * (num_test_cases / self._shard_count)) /
-            self._hasty_batch_size)
-        shard_end = self._hasty_batch_size * ((
-            (self._shard_number + 1) * (num_test_cases / self._shard_count)) /
-                                              self._hasty_batch_size)
+            (self._shard_number *
+             (num_test_cases / self._shard_count)) / self._hasty_batch_size)
+        shard_end = self._hasty_batch_size * (
+            ((self._shard_number + 1) *
+             (num_test_cases / self._shard_count)) / self._hasty_batch_size)
         # The last shard will be slightly larger than the others. Extend it to
         # cover all test cases avoiding rounding problems with the integer
         # arithmetics done to compute shard_start and shard_end.
@@ -452,8 +463,8 @@
                              batch_cases)
             else:
                 executable = self._get_executable(api)
-                log_file = os.path.join(self._log_path,
-                                        '%s_hasty_%d.log' % (self._filter, batch))
+                log_file = os.path.join(
+                    self._log_path, '%s_hasty_%d.log' % (self._filter, batch))
                 command = ('%s '
                            '--deqp-stdin-caselist '
                            '--deqp-surface-type=%s '
@@ -463,13 +474,8 @@
                            '--deqp-watchdog=enable '
                            '--deqp-surface-width=%d '
                            '--deqp-surface-height=%d '
-                           '--deqp-log-filename=%s' % (
-                               executable,
-                               self._surface,
-                               width,
-                               height,
-                               log_file)
-                           )
+                           '--deqp-log-filename=%s' %
+                           (executable, self._surface, width, height, log_file))
 
                 logging.info('Running tests %d...%d out of %d:\n%s\n%s',
                              batch + 1, batch_to, num_test_cases, command,
@@ -480,22 +486,25 @@
                 os.chdir(os.path.dirname(executable))
 
                 try:
-                    utils.run(command,
-                              env=self._env,
-                              timeout=batch_timeout,
-                              stderr_is_expected=False,
-                              ignore_status=False,
-                              stdin=batch_cases)
+                    utils.run(
+                        command,
+                        env=self._env,
+                        timeout=batch_timeout,
+                        stderr_is_expected=False,
+                        ignore_status=False,
+                        stdin=batch_cases)
                 except Exception:
                     pass
                 # We are trying to handle all errors by parsing the log file.
                 results = self._parse_test_results(log_file, results,
                                                    failing_test)
+                self._archive_test_results(log_file)
                 logging.info(results)
         return results
 
     def _run_once(self, test_cases):
         """Run dEQP test_cases in individual/hasty mode.
+
         @param test_cases: test cases to run.
         """
         failing_test = []
@@ -509,18 +518,19 @@
         return test_results, failing_test
 
     def run_once(self, opts=None):
-        options = dict(filter='',
-                       test_names='',  # e.g., dEQP-GLES3.info.version,
-                                       # dEQP-GLES2.functional,
-                                       # dEQP-GLES3.accuracy.texture, etc.
-                       test_names_file='',
-                       timeout=self._timeout,
-                       subset_to_run='Pass',  # Pass, Fail, Timeout, NotPass...
-                       hasty='False',
-                       shard_number='0',
-                       shard_count='1',
-                       debug='False',
-                       perf_failure_description=None)
+        options = dict(
+            filter='',
+            test_names='',  # e.g., dEQP-GLES3.info.version,
+            # dEQP-GLES2.functional,
+            # dEQP-GLES3.accuracy.texture, etc.
+            test_names_file='',
+            timeout=self._timeout,
+            subset_to_run='Pass',  # Pass, Fail, Timeout, NotPass...
+            hasty='False',
+            shard_number='0',
+            shard_count='1',
+            debug='False',
+            perf_failure_description=None)
         if opts is None:
             opts = []
         options.update(utils.args_to_dict(opts))
@@ -551,8 +561,8 @@
         # Create a place to put detailed test output logs.
         filter_name = self._filter or os.path.basename(self._test_names_file)
         logging.info('dEQP test filter = %s', filter_name)
-        self._log_path = os.path.join(tempfile.gettempdir(), '%s-logs' %
-                                                             filter_name)
+        self._log_path = os.path.join(tempfile.gettempdir(),
+                                      '%s-logs' % filter_name)
         shutil.rmtree(self._log_path, ignore_errors=True)
         os.mkdir(self._log_path)
 
@@ -581,19 +591,19 @@
         # Rerun the test if we are in hasty mode.
         if self._hasty and len(failing_test) > 0:
             if len(failing_test) < sum(test_results.values()) * RERUN_RATIO:
-                logging.info("Because we are in hasty mode, we will rerun the "
-                             "failing tests one at a time")
+                logging.info('Because we are in hasty mode, we will rerun the '
+                             'failing tests one at a time')
                 rerun_results, failing_test = self._run_once(failing_test)
                 # Update failing test result from the test_results
                 for result in test_results:
                     if result.lower() not in self.TEST_RESULT_FILTER:
                         test_results[result] = 0
                 for result in rerun_results:
-                    test_results[result] = (test_results.get(result, 0) +
-                                            rerun_results[result])
+                    test_results[result] = (
+                        test_results.get(result, 0) + rerun_results[result])
             else:
-                logging.info("There are too many failing tests. It would "
-                             "take too long to rerun them. Giving up.")
+                logging.info('There are too many failing tests. It would '
+                             'take too long to rerun them. Giving up.')
 
         # Update failing tests to the chrome perf dashboard records.
         for test_case in test_cases:
@@ -631,5 +641,5 @@
             raise error.TestFail('Failed: on %s %d/%d tests failed.' %
                                  (self._gpu_type, test_failures, test_count))
         if test_skipped > 0:
-            logging.info('On %s %d tests skipped, %d passes' %
-                         (self._gpu_type, test_skipped, test_passes))
+            logging.info('On %s %d tests skipped, %d passes', self._gpu_type,
+                         test_skipped, test_passes)
diff --git a/client/site_tests/login_CryptohomeDataLeak/login_CryptohomeDataLeak.py b/client/site_tests/login_CryptohomeDataLeak/login_CryptohomeDataLeak.py
index 1a353cf..19fd5e4 100644
--- a/client/site_tests/login_CryptohomeDataLeak/login_CryptohomeDataLeak.py
+++ b/client/site_tests/login_CryptohomeDataLeak/login_CryptohomeDataLeak.py
@@ -30,7 +30,7 @@
             test_file =  '/home/.shadow/%s/mount/hello' \
                          % cryptohome.get_user_hash(username)
 
-            logging.info("Test file: ", test_file)
+            logging.info("Test file: %s", test_file)
             open(test_file, 'w').close()
 
         if cryptohome.is_vault_mounted(user=username, allow_fail=True):
diff --git a/client/site_tests/network_ShillStability/control b/client/site_tests/network_ShillStability/control
index c302069..da0bd9e 100644
--- a/client/site_tests/network_ShillStability/control
+++ b/client/site_tests/network_ShillStability/control
@@ -5,8 +5,7 @@
 AUTHOR = 'kirtika'
 NAME = 'network_ShillStability'
 ATTRIBUTES = ('suite:network_nightly, suite:wifi_matfunc,'
-              'suite:wifi_matfunc_noservo, suite:bvt-perbuild,'
-              'suite:wificell-pre-cq')
+              'suite:bvt-perbuild, suite:wificell-pre-cq')
 TIME = 'SHORT'
 TEST_TYPE = 'client'
 
diff --git a/client/site_tests/network_WlanDriver/control b/client/site_tests/network_WlanDriver/control
index 73adfa3..9d1b5e2 100644
--- a/client/site_tests/network_WlanDriver/control
+++ b/client/site_tests/network_WlanDriver/control
@@ -5,8 +5,7 @@
 AUTHOR = 'pstew, quiche, wiley'
 NAME = 'network_WlanDriver'
 ATTRIBUTES = ('suite:network_nightly, suite:wifi_matfunc,'
-              'suite:wifi_matfunc_noservo, suite:bvt-perbuild,'
-              'suite:wificell-pre-cq')
+              'suite:bvt-perbuild, suite:wificell-pre-cq')
 TIME = 'SHORT'
 TEST_TYPE = 'client'
 
diff --git a/client/site_tests/policy_ArcVideoCaptureAllowed/control b/client/site_tests/policy_ArcVideoCaptureAllowed/control
new file mode 100644
index 0000000..99c7238
--- /dev/null
+++ b/client/site_tests/policy_ArcVideoCaptureAllowed/control
@@ -0,0 +1,20 @@
+# Copyright 2019 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'dbeckett'
+NAME = 'policy_ArcVideoCaptureAllowed'
+TIME = 'SHORT'
+TEST_CATEGORY = 'General'
+TEST_CLASS = 'enterprise'
+TEST_TYPE = 'client'
+DEPENDENCIES = "arc"
+
+DOC = '''
+Verify that the ArcVideoCaptureAllowed ChromeOS Policy propagates to the ARC
+clouddpc setting.
+'''
+
+args_dict = utils.args_to_dict(args)
+
+job.run_test('policy_ArcVideoCaptureAllowed', **args_dict)
\ No newline at end of file
diff --git a/client/site_tests/policy_ArcVideoCaptureAllowed/control.allowed b/client/site_tests/policy_ArcVideoCaptureAllowed/control.allowed
new file mode 100644
index 0000000..68b2b7d
--- /dev/null
+++ b/client/site_tests/policy_ArcVideoCaptureAllowed/control.allowed
@@ -0,0 +1,21 @@
+# Copyright 2019 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'dbeckett'
+NAME = 'policy_ArcVideoCaptureAllowed.allow'
+ATTRIBUTES = 'suite:ent-nightly, suite:policy'
+TIME = 'SHORT'
+TEST_CATEGORY = 'General'
+TEST_CLASS = 'enterprise'
+TEST_TYPE = 'client'
+DEPENDENCIES = "arc"
+
+DOC = '''
+Verify when the 'ArcVideoCaptureAllowed' policy is set to True (allow) the ARC
+Camera within the ARC container can be launched.
+
+'''
+
+job.run_test('policy_ArcVideoCaptureAllowed',
+             case=True)
\ No newline at end of file
diff --git a/client/site_tests/policy_ArcVideoCaptureAllowed/control.disable b/client/site_tests/policy_ArcVideoCaptureAllowed/control.disable
new file mode 100644
index 0000000..217cd20
--- /dev/null
+++ b/client/site_tests/policy_ArcVideoCaptureAllowed/control.disable
@@ -0,0 +1,21 @@
+# Copyright 2019 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'dbeckett'
+NAME = 'policy_ArcVideoCaptureAllowed.disable'
+ATTRIBUTES = 'suite:ent-nightly, suite:policy'
+TIME = 'SHORT'
+TEST_CATEGORY = 'General'
+TEST_CLASS = 'enterprise'
+TEST_TYPE = 'client'
+DEPENDENCIES = "arc"
+
+DOC = '''
+Verify when the 'ArcVideoCaptureAllowed' policy is set to False (disable) the ARC
+Camera within the ARC container can not be launched.
+
+'''
+
+job.run_test('policy_ArcVideoCaptureAllowed',
+             case=False)
\ No newline at end of file
diff --git a/client/site_tests/policy_ArcVideoCaptureAllowed/control.not_set b/client/site_tests/policy_ArcVideoCaptureAllowed/control.not_set
new file mode 100644
index 0000000..6d2ab61
--- /dev/null
+++ b/client/site_tests/policy_ArcVideoCaptureAllowed/control.not_set
@@ -0,0 +1,21 @@
+# Copyright 2019 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'dbeckett'
+NAME = 'policy_ArcVideoCaptureAllowed.not_set'
+ATTRIBUTES = 'suite:ent-nightly, suite:policy'
+TIME = 'SHORT'
+TEST_CATEGORY = 'General'
+TEST_CLASS = 'enterprise'
+TEST_TYPE = 'client'
+DEPENDENCIES = "arc"
+
+DOC = '''
+Verify when the 'ArcVideoCaptureAllowed' policy is not set (None) the ARC
+Camera within the ARC container can be launched.
+
+'''
+
+job.run_test('policy_ArcVideoCaptureAllowed',
+             case=None)
\ No newline at end of file
diff --git a/client/site_tests/policy_ArcVideoCaptureAllowed/policy_ArcVideoCaptureAllowed.py b/client/site_tests/policy_ArcVideoCaptureAllowed/policy_ArcVideoCaptureAllowed.py
new file mode 100644
index 0000000..d8d30f2
--- /dev/null
+++ b/client/site_tests/policy_ArcVideoCaptureAllowed/policy_ArcVideoCaptureAllowed.py
@@ -0,0 +1,94 @@
+# Copyright 2019 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import error
+from autotest_lib.client.common_lib import utils
+
+from autotest_lib.client.common_lib.cros import arc
+from autotest_lib.client.cros.enterprise import enterprise_policy_base
+
+
+class policy_ArcVideoCaptureAllowed(
+        enterprise_policy_base.EnterprisePolicyTest):
+    """
+    Test effect of the ArcVideoCaptureAllowed ChromeOS policy on ARC.
+
+    This test will launch the ARC container via the ArcEnabled policy, then
+    will check the behavior of the passthrough policy VideoCaptureAllowed.
+
+    When the policy is set to False, Video Capture is not allowed. To test
+    this, we will attempt to launch the ARC Camera, and check the logs to see
+    if the Camera was launched or not.
+
+    """
+    version = 1
+
+    def _test_Arc_cam_status(self, case):
+        """
+        Test if the Arc Camera has been opened, or not.
+
+        @param case: bool, value of the VideoCaptureAllowed policy.
+
+        """
+
+        #  The Camera can take a few seconds to respond, wait for it.
+        utils.poll_for_condition(
+            lambda: self.did_cam_app_respond(),
+            exception=error.TestFail('Camera APP did not respond.'),
+            timeout=10,
+            sleep_interval=1,
+            desc='Wait for Camera to respond.')
+
+        #  Once the Camera is open, get the status from logcat.
+        cam_device_resp, disabled_resp = self._check_cam_status()
+
+        if case or case is None:
+            if 'opened successfully' not in cam_device_resp or disabled_resp:
+                raise error.TestFail(
+                    'Camera did not launch when it should have.')
+        else:
+            if ('opened successfully' in cam_device_resp or
+                'disabled by policy' not in disabled_resp):
+                raise error.TestFail(
+                    'Camera did launch when it should not have.')
+
+    def _launch_Arc_Cam(self):
+        """Grant the Camera location permission, and launch the Camera app."""
+        arc.adb_shell('pm grant com.google.android.GoogleCameraArc android.permission.ACCESS_COARSE_LOCATION')
+        arc.adb_shell('am start -a android.media.action.IMAGE_CAPTURE')
+
+    def _check_cam_status(self):
+        """Returns the specified section from loggcat."""
+        return [arc.adb_shell("logcat -d | grep 'Camera device'"),
+                arc.adb_shell("logcat -d | grep 'CAMERA_DISABLED'")]
+
+    def did_cam_app_respond(self):
+        """
+        Check if the Camera app has responded to the start command via
+        data in the logs being populated.
+
+        @return: True/False, if the Camera has responded to the start command.
+
+        """
+
+        cam_logs = self._check_cam_status()
+        if cam_logs[0] or cam_logs[1]:
+            return True
+        return False
+
+    def run_once(self, case):
+        """
+        Setup and run the test configured for the specified test case.
+
+        @param case: Name of the test case to run.
+
+        """
+        pol = {'ArcEnabled': True,
+               'VideoCaptureAllowed': case}
+
+        self.setup_case(user_policies=pol,
+                        arc_mode='enabled',
+                        use_clouddpc_test=False)
+        self._launch_Arc_Cam()
+        self._test_Arc_cam_status(case)
diff --git a/client/site_tests/policy_PrintingEnabled/control b/client/site_tests/policy_PrintingEnabled/control
new file mode 100644
index 0000000..588bcd5
--- /dev/null
+++ b/client/site_tests/policy_PrintingEnabled/control
@@ -0,0 +1,19 @@
+# Copyright 2019 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'dbeckett'
+NAME = 'policy_PrintingEnabled'
+TIME = 'SHORT'
+TEST_CATEGORY = 'General'
+TEST_CLASS = 'enterprise'
+TEST_TYPE = 'client'
+
+DOC = '''
+Verify effects of policy_PrintingEnabled policy.
+
+'''
+
+args_dict = utils.args_to_dict(args)
+
+job.run_test('policy_PrintingEnabled', **args_dict)
\ No newline at end of file
diff --git a/client/site_tests/policy_PrintingEnabled/control.disabled b/client/site_tests/policy_PrintingEnabled/control.disabled
new file mode 100644
index 0000000..b1265ee
--- /dev/null
+++ b/client/site_tests/policy_PrintingEnabled/control.disabled
@@ -0,0 +1,19 @@
+# Copyright 2019 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'dbeckett'
+NAME = 'policy_PrintingEnabled.disabled'
+ATTRIBUTES = 'suite:ent-nightly, suite:policy'
+TIME = 'SHORT'
+TEST_CATEGORY = 'General'
+TEST_CLASS = 'enterprise'
+TEST_TYPE = 'client'
+
+DOC = '''
+Verify when the 'PrintingEnabled' policy is disabled (False), it will disable
+printing inside the Chrome browser.
+
+'''
+
+job.run_test('policy_PrintingEnabled', case=False)
diff --git a/client/site_tests/policy_PrintingEnabled/control.enabled b/client/site_tests/policy_PrintingEnabled/control.enabled
new file mode 100644
index 0000000..221d5b1
--- /dev/null
+++ b/client/site_tests/policy_PrintingEnabled/control.enabled
@@ -0,0 +1,19 @@
+# Copyright 2019 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'dbeckett'
+NAME = 'policy_PrintingEnabled.enabled'
+ATTRIBUTES = 'suite:ent-nightly, suite:policy'
+TIME = 'SHORT'
+TEST_CATEGORY = 'General'
+TEST_CLASS = 'enterprise'
+TEST_TYPE = 'client'
+
+DOC = '''
+Verify when the 'PrintingEnabled' policy is enabled (True), it will enable
+printing inside the Chrome browser.
+
+'''
+
+job.run_test('policy_PrintingEnabled', case=True)
diff --git a/client/site_tests/policy_PrintingEnabled/control.not_set b/client/site_tests/policy_PrintingEnabled/control.not_set
new file mode 100644
index 0000000..24bd76f
--- /dev/null
+++ b/client/site_tests/policy_PrintingEnabled/control.not_set
@@ -0,0 +1,19 @@
+# Copyright 2019 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'dbeckett'
+NAME = 'policy_PrintingEnabled.not_set'
+ATTRIBUTES = 'suite:ent-nightly, suite:policy'
+TIME = 'SHORT'
+TEST_CATEGORY = 'General'
+TEST_CLASS = 'enterprise'
+TEST_TYPE = 'client'
+
+DOC = '''
+Verify when the 'PrintingEnabled' policy is not set (None), it will allow
+printing inside the Chrome browser.
+
+'''
+
+job.run_test('policy_PrintingEnabled', case=None)
diff --git a/client/site_tests/policy_PrintingEnabled/policy_PrintingEnabled.py b/client/site_tests/policy_PrintingEnabled/policy_PrintingEnabled.py
new file mode 100644
index 0000000..c0330a7
--- /dev/null
+++ b/client/site_tests/policy_PrintingEnabled/policy_PrintingEnabled.py
@@ -0,0 +1,77 @@
+# Copyright 2019 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+import time
+
+from autotest_lib.client.common_lib import error
+from autotest_lib.client.common_lib import utils
+
+from autotest_lib.client.cros.enterprise import enterprise_policy_base
+from autotest_lib.client.cros.input_playback import keyboard
+
+
+class policy_PrintingEnabled(
+        enterprise_policy_base.EnterprisePolicyTest):
+    """
+    Test effect of PrintingEnabled policy on Chrome OS.
+
+    The test will open a page, and attempt to 'print' the page as a local
+    PDF. If PDF is present after the print, printing is Enabled. If not,
+    printing is disabled.
+
+    """
+    version = 1
+
+    POLICY_NAME = 'PrintingEnabled'
+
+    def _input_key(self, key):
+        """
+        Press the key specified, wait a short time for the page to respond.
+
+        There is a 2 second wait for each button push to allow the dialog to
+        open, and load before proceeding. Because this dialog is not part of
+        the controllable telemetry view, there is not an easier/better way to
+        know if the dialog is fully loaded or not.
+
+        @param key: string of the key(s) to press"""
+
+        self.keyboard.press_key(key)
+        time.sleep(2)
+
+    def _print_check(self, case):
+        """
+        Navigates to the chrome://policy page, and will check to see if the
+        print button is enabled/blocked.
+
+        @param case: bool or None, the setting of the PrintingEnabled Policy
+
+        """
+        self.navigate_to_url('chrome://policy')
+
+        # Open the print page, and hit enter to print, and save (as local pdf).
+        self._input_key('ctrl+p')
+        self._input_key('enter')
+        self._input_key('enter')
+
+        download_dur = utils.system_output('ls /home/chronos/user/Downloads/')
+
+        if case or case is None:
+            if 'Policies' not in download_dur:
+                raise error.TestError('Printing not enabled when it should be')
+        else:
+            if 'Policies' in download_dur:
+                raise error.TestError('Printing enabled when it should not be')
+
+    def run_once(self, case):
+        """
+        Entry point of the test.
+
+        @param case: Name of the test case to run.
+
+        """
+        self.keyboard = keyboard.Keyboard()
+        self.setup_case(user_policies={'PrintingEnabled': case},
+                        disable_default_apps=False)
+
+        self._print_check(case)
+        self.keyboard.close()
diff --git a/client/site_tests/policy_ScreenBrightnessPercent/control b/client/site_tests/policy_ScreenBrightnessPercent/control
index 0b84317..9782719 100644
--- a/client/site_tests/policy_ScreenBrightnessPercent/control
+++ b/client/site_tests/policy_ScreenBrightnessPercent/control
@@ -8,6 +8,7 @@
 TEST_CATEGORY = 'General'
 TEST_CLASS = 'enterprise'
 TEST_TYPE = 'client'
+DEPENDENCIES = 'internal_display'
 
 DOC = '''
 Verify effects of ScreenBrightnessPercent policy.
diff --git a/client/site_tests/policy_ScreenBrightnessPercent/control.1 b/client/site_tests/policy_ScreenBrightnessPercent/control.1
index 281ad5e..fae16eb 100644
--- a/client/site_tests/policy_ScreenBrightnessPercent/control.1
+++ b/client/site_tests/policy_ScreenBrightnessPercent/control.1
@@ -5,6 +5,7 @@
 AUTHOR = 'dbeckett'
 NAME = 'policy_ScreenBrightnessPercent.1'
 ATTRIBUTES = 'suite:ent-nightly, suite:policy'
+DEPENDENCIES = 'internal_display'
 TIME = 'SHORT'
 TEST_CATEGORY = 'General'
 TEST_CLASS = 'enterprise'
diff --git a/client/site_tests/policy_ScreenBrightnessPercent/control.100 b/client/site_tests/policy_ScreenBrightnessPercent/control.100
index b977228..7c3bd1e 100644
--- a/client/site_tests/policy_ScreenBrightnessPercent/control.100
+++ b/client/site_tests/policy_ScreenBrightnessPercent/control.100
@@ -5,6 +5,7 @@
 AUTHOR = 'dbeckett'
 NAME = 'policy_ScreenBrightnessPercent.100'
 ATTRIBUTES = 'suite:ent-nightly, suite:policy'
+DEPENDENCIES = 'internal_display'
 TIME = 'SHORT'
 TEST_CATEGORY = 'General'
 TEST_CLASS = 'enterprise'
diff --git a/client/site_tests/policy_ScreenBrightnessPercent/control.50 b/client/site_tests/policy_ScreenBrightnessPercent/control.50
index 3c2d128..03a2e77 100644
--- a/client/site_tests/policy_ScreenBrightnessPercent/control.50
+++ b/client/site_tests/policy_ScreenBrightnessPercent/control.50
@@ -5,6 +5,7 @@
 AUTHOR = 'dbeckett'
 NAME = 'policy_ScreenBrightnessPercent.50'
 ATTRIBUTES = 'suite:ent-nightly, suite:policy'
+DEPENDENCIES = 'internal_display'
 TIME = 'SHORT'
 TEST_CATEGORY = 'General'
 TEST_CLASS = 'enterprise'
diff --git a/client/site_tests/power_LoadTest/control.fast b/client/site_tests/power_LoadTest/control.fast
index 40ae946..753998c 100644
--- a/client/site_tests/power_LoadTest/control.fast
+++ b/client/site_tests/power_LoadTest/control.fast
@@ -23,19 +23,16 @@
 It ignores whether wired access (check_network=False) or AC is
 connected(ac_ok=True).
 
-For reasons above and reduced runtime (3min) it will NOT produce valid power
+For reasons above and reduced runtime (5min) it will NOT produce valid power
 consumption results for the 60/20/10/10 load and therefore should NOT be used
 for any battery life estimations.
 """
-
-# TODO (bleung): Find a way to do automatic Facebook login for test account.
-# TODO (tbroch): Find way to not replicate all these parameters that are common
-# between this control file and the original
-loop_time = 180
+loop_time = 300
 loop_count = 1
 
 args_dict = utils.args_to_dict(args)
 pdash_note = args_dict.get('pdash_note', '')
 job.run_test('power_LoadTest', loop_time=loop_time, loop_count=loop_count,
              test_low_batt_p=6, check_network=False, ac_ok=True,
-             gaia_login=False, tag=NAME.split('.')[1], pdash_note=pdash_note)
+             scroll_interval_ms=1000, gaia_login=False, tag=NAME.split('.')[1],
+             pdash_note=pdash_note)
diff --git a/client/site_tests/power_LoadTest/extension/ct.js b/client/site_tests/power_LoadTest/extension/ct.js
index ab9df8c..acea6c4 100644
--- a/client/site_tests/power_LoadTest/extension/ct.js
+++ b/client/site_tests/power_LoadTest/extension/ct.js
@@ -23,7 +23,7 @@
     if (response && response.should_scroll) {
       window.focus();
       lastOffset = window.pageYOffset;
-      var start_interval = Math.max(10000, response.scroll_interval);
+      var start_interval = Math.max(1000, response.scroll_interval);
       function smoothScrollDown() {
         report_scrolling_to_test();
         window.scrollBy(0, response.scroll_by);
diff --git a/client/site_tests/power_VideoPlayback/power_VideoPlayback.py b/client/site_tests/power_VideoPlayback/power_VideoPlayback.py
index 1ff8b3d..e81e5c3 100644
--- a/client/site_tests/power_VideoPlayback/power_VideoPlayback.py
+++ b/client/site_tests/power_VideoPlayback/power_VideoPlayback.py
@@ -18,43 +18,57 @@
     """
     version = 1
 
+    _BASE_URL='http://commondatastorage.googleapis.com/chromiumos-test-assets-public/tast/cros/video/perf/'
+
     # list of video name and url.
     _VIDEOS = [
         ('h264_1080_30fps',
-         'http://commondatastorage.googleapis.com/chromiumos-test-assets-public/traffic/traffic-1920x1080-8005020218f6b86bfa978e550d04956e.mp4'
+         _BASE_URL + 'h264/1080p_30fps_300frames_20181225.h264.mp4'
         ),
         ('h264_1080_60fps',
-         'http://commondatastorage.googleapis.com/chromiumos-test-assets-public/Shaka-Dash/1080_60_10s_600frames-c80aeceeabfc9fc18ed2f98f219c85af.mp4'
+         _BASE_URL + 'h264/1080p_60fps_600frames_20181225.h264.mp4'
         ),
         ('h264_4k_30fps',
-         'http://commondatastorage.googleapis.com/chromiumos-test-assets-public/traffic/traffic_3840x2160-32ec10f87ef369d0e5ec9c736d63cc58.mp4'
+         _BASE_URL + 'h264/2160p_30fps_300frames_20181225.h264.mp4'
         ),
         ('h264_4k_60fps',
-         'http://commondatastorage.googleapis.com/chromiumos-test-assets-public/Shaka-Dash/h264_4k_60_10s_600frames-ab1bfb374d2e408aac4a1beaa1aa0817.mp4'
+         _BASE_URL + 'h264/2160p_60fps_600frames_20181225.h264.mp4'
         ),
         ('vp8_1080_30fps',
-         'http://commondatastorage.googleapis.com/chromiumos-test-assets-public/traffic/traffic-1920x1080-ad53f821ff3cf8ffa7e991c9d2e0b854.vp8.webm'
+         _BASE_URL + 'vp8/1080p_30fps_300frames_20181225.vp8.webm'
         ),
         ('vp8_1080_60fps',
-         'http://commondatastorage.googleapis.com/chromiumos-test-assets-public/Shaka-Dash/1080_60_10s_600frames_vp8-c190d557caaf415f762af911b41bc32b.webm'
+         _BASE_URL + 'vp8/1080p_60fps_600frames_20181225.vp8.webm'
         ),
         ('vp8_4k_30fps',
-         'http://commondatastorage.googleapis.com/chromiumos-test-assets-public/Shaka-Dash/2160_vp8_600frames-3d61b1aed4e3f32249c7d324a809ef54.vp8.webm'
+         _BASE_URL + 'vp8/2160p_30fps_300frames_20181225.vp8.webm'
         ),
         ('vp8_4k_60fps',
-         'http://commondatastorage.googleapis.com/chromiumos-test-assets-public/Shaka-Dash/vp8_4k_60_10s_600frames-b8d65f0eea64647be5413a75622abe79.webm'
+         _BASE_URL + 'vp8/2160p_60fps_600frames_20181225.vp8.webm'
         ),
         ('vp9_1080_30fps',
-         'http://commondatastorage.googleapis.com/chromiumos-test-assets-public/traffic/traffic-1920x1080-83a1e5f8b7944577425f039034e64c76.vp9.webm'
+         _BASE_URL + 'vp9/1080p_30fps_300frames_20181225.vp9.webm'
         ),
         ('vp9_1080_60fps',
-         'http://commondatastorage.googleapis.com/chromiumos-test-assets-public/video_tests/perf/fallout4_1080_hfr.vp9.webm'
+         _BASE_URL + 'vp9/1080p_60fps_600frames_20181225.vp9.webm'
         ),
         ('vp9_4k_30fps',
-         'http://commondatastorage.googleapis.com/chromiumos-test-assets-public/traffic/traffic-3840x2160-cbcdda7d7143b3e9f8efbeed0c4157b5.vp9.webm'
+         _BASE_URL + 'vp9/2160p_30fps_300frames_20181225.vp9.webm'
         ),
         ('vp9_4k_60fps',
-         'http://commondatastorage.googleapis.com/chromiumos-test-assets-public/Shaka-Dash/2160_60_10s_600frames-2fd17338cb4d9cfd9d7299a108ca9145.vp9.webm'
+         _BASE_URL + 'vp9/2160p_60fps_600frames_20181225.vp9.webm'
+        ),
+        ('av1_720_30fps',
+         _BASE_URL + 'av1/720p_30fps_300frames_20190305.av1.mp4'
+        ),
+        ('av1_720_60fps',
+         _BASE_URL + 'av1/720p_60fps_600frames_20190305.av1.mp4'
+        ),
+        ('av1_1080_30fps',
+         _BASE_URL + 'av1/1080p_30fps_300frames_20190305.av1.mp4'
+        ),
+        ('av1_1080_60fps',
+         _BASE_URL + 'av1/1080p_60fps_600frames_20190305.av1.mp4'
         ),
     ]
 
diff --git a/client/site_tests/security_ASLR/control b/client/site_tests/security_ASLR/control
deleted file mode 100644
index 6d05f80..0000000
--- a/client/site_tests/security_ASLR/control
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-TIME = "SHORT"
-AUTHOR = "The Chromium OS Authors"
-DOC = ("This test verifies that ASLR is working in important processes by"
-    "checking their memory mappings before and after restarting them.")
-NAME = "security_ASLR"
-PURPOSE = "To ensure ASLR is enabled for chrome and debugd"
-CRITERIA = (" Fail if a process can't restart or has address range starts"
-    "for any executable stack, or heap memory map to the same location"
-    "after restarting")
-ATTRIBUTES = "suite:bvt-inline, suite:smoke"
-TEST_CLASS = "security"
-TEST_CATEGORY = "Functional"
-TEST_TYPE = "client"
-JOB_RETRIES = 2
-
-job.run_test('security_ASLR')
diff --git a/client/site_tests/security_ASLR/security_ASLR.py b/client/site_tests/security_ASLR/security_ASLR.py
deleted file mode 100644
index 4d0e6f5..0000000
--- a/client/site_tests/security_ASLR/security_ASLR.py
+++ /dev/null
@@ -1,365 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""A test verifying Address Space Layout Randomization
-
-Uses system calls to get important pids and then gets information about
-the pids in /proc/<pid>/maps. Restarts the tested processes and reads
-information about them again. If ASLR is enabled, memory mappings should
-change.
-"""
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.bin import utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros import upstart
-
-import logging
-import time
-import pprint
-import re
-
-def _pidsof(exe_name):
-    """Returns the PIDs of processes with the given name as a list."""
-    output = utils.system_output('pidof %s' % exe_name,
-                                 ignore_status=True).strip()
-    return [int(pid) for pid in output.split()]
-
-
-class Process(object):
-    """Holds information about a process.
-
-    Stores basic information about a process. This class is a base for
-    UpstartProcess and SystemdProcess declared below.
-
-    Attributes:
-        _name: String name of process.
-        _service_name: Name of the service corresponding to the process.
-        _parent: String name of process's parent.
-    """
-
-    _START_POLL_INTERVAL_SECONDS = 1
-    _START_TIMEOUT = 30
-
-    def __init__(self, name, service_name, parent):
-        self._name = name
-        self._service_name = service_name
-        self._parent = parent
-
-    def get_name(self):
-        return self._name
-
-    def get_pid(self):
-        """Gets pid of process, waiting for it if not found.
-
-        Raises:
-            error.TestFail: corresponding process is not found.
-        """
-        retries = 0
-        ps_results = ""
-        while retries < self._START_TIMEOUT:
-            # Find all PIDs matching the expected parent name, then find all
-            # PIDs that have the expected process name and any of the parent
-            # PIDs. Only succeed when there is exactly one PID/PPID pairing.
-            # This is needed to handle cases where multiple processes share the
-            # expected parent name. See crbug.com/741110 for background.
-            ppids = _pidsof(self._parent)
-            pids = _pidsof(self._name)
-            if ppids and pids:
-                ps_command = ('ps h --ppid %s -o pid' %
-                              ','.join([str(pid) for pid in ppids]))
-                ps_results = utils.system_output(ps_command).strip()
-                child_pids = [int(pid) for pid in ps_results.split()]
-                intersection = set(child_pids).intersection(pids)
-                if len(intersection) == 1:
-                    return next(iter(intersection))
-                elif len(intersection) > 1:
-                    # More than one candidate process found - rather than pick
-                    # one arbitrarily, continue to wait. This is not expected -
-                    # but continuing to wait will avoid weird failures if some
-                    # time in the future there are multiple non-transient
-                    # parent/child processes with the same names.
-                    logging.debug("Found multiple processes for '%s'",
-                                  self._name)
-
-            # The process, or its parent, could not be found. We then sleep,
-            # hoping the process is just slow to initially start.
-            time.sleep(self._START_POLL_INTERVAL_SECONDS)
-            retries += 1
-
-        # We never saw the process, so abort with details on who was missing.
-        raise error.TestFail('Never saw a pid for "%s"' % (self._name))
-
-
-class UpstartProcess(Process):
-    """Represents an Upstart service."""
-
-    def __init__(self, name, service_name, parent='init'):
-        super(UpstartProcess, self).__init__(name, service_name, parent)
-
-    def exists(self):
-        """Checks if the service is present in Upstart configuration."""
-        return upstart.has_service(self._service_name)
-
-    def restart(self):
-        """Restarts the process via initctl."""
-        utils.system('initctl restart %s' % self._service_name)
-
-class SystemdProcess(Process):
-    """Represents an systemd service."""
-
-    def __init__(self, name, service_name, parent='systemd'):
-        super(SystemdProcess, self).__init__(name, service_name, parent)
-
-    def exists(self):
-        """Checks if the service is present in systemd configuration."""
-        cmd = 'systemctl show -p ActiveState %s.service' % self._service_name
-        output = utils.system_output(cmd, ignore_status=True).strip()
-        return output == 'ActiveState=active'
-
-    def restart(self):
-        """Restarts the process via systemctl."""
-        # Reset the restart rate counter each time before process restart to
-        # avoid systemd restart rate limiting.
-        utils.system('systemctl reset-failed %s' % self._service_name)
-        utils.system('systemctl restart %s' % self._service_name)
-
-class Mapping(object):
-    """Holds information about a process's address mapping.
-
-    Stores information about one memory mapping for a process.
-
-    Attributes:
-        _name: String name of process/memory occupying the location.
-        _start: String containing memory address range start.
-    """
-    def __init__(self, name, start):
-        self._start = start
-        self._name = name
-
-    def set_start(self, new_value):
-        self._start = new_value
-
-    def get_start(self):
-        return self._start
-
-    def __repr__(self):
-        return "<mapping %s %s>" % (self._name, self._start)
-
-
-class security_ASLR(test.test):
-    """Runs ASLR tests
-
-    See top document comments for more information.
-
-    Attributes:
-        version: Current version of the test.
-    """
-    version = 1
-
-    _TEST_ITERATION_COUNT = 5
-
-    _ASAN_SYMBOL = "__asan_init"
-
-    # 'update_engine' should at least be present on all boards.
-    _PROCESS_LIST = [UpstartProcess('chrome', 'ui', parent='session_manager'),
-                     UpstartProcess('debugd', 'debugd'),
-                     UpstartProcess('update_engine', 'update-engine'),
-                     SystemdProcess('update_engine', 'update-engine'),
-                     SystemdProcess('systemd-journald', 'systemd-journald'),]
-
-
-    def get_processes_to_test(self):
-        """Gets processes to test for main function.
-
-        Called by run_once to get processes for this program to test.
-        Filters binaries that actually exist on the system.
-        This has to be a method because it constructs process objects.
-
-        Returns:
-            A list of process objects to be tested (see below for
-            definition of process class).
-        """
-        return [p for p in self._PROCESS_LIST if p.exists()]
-
-
-    def running_on_asan(self):
-        """Returns whether we're running on ASan."""
-        # -q, --quiet         * Only output 'bad' things
-        # -F, --format <arg>  * Use specified format for output
-        # -g, --gmatch        * Use regex rather than string compare (with -s)
-        # -s, --symbol <arg>  * Find a specified symbol
-        scanelf_command = "scanelf -qF'%s#F'"
-        scanelf_command += " -gs %s `which debugd`" % self._ASAN_SYMBOL
-        symbol = utils.system_output(scanelf_command)
-        logging.debug("running_on_asan(): symbol: '%s', _ASAN_SYMBOL: '%s'",
-                      symbol, self._ASAN_SYMBOL)
-        return symbol != ""
-
-
-    def test_randomization(self, process):
-        """Tests ASLR of a single process.
-
-        This is the main test function for the program. It creates data
-        structures out of useful information from sampling /proc/<pid>/maps
-        after restarting the process and then compares address starting
-        locations of all executable, stack, and heap memory from each iteration.
-
-        @param process: a process object representing the process to be tested.
-
-        Returns:
-            A dict containing a Boolean for whether or not the test passed
-            and a list of string messages about passing/failing cases.
-        """
-        test_result = dict([('pass', True), ('results', []), ('cases', dict())])
-        name = process.get_name()
-        mappings = list()
-        pid = -1
-        for i in range(self._TEST_ITERATION_COUNT):
-            new_pid = process.get_pid()
-            if pid == new_pid:
-                raise error.TestFail(
-                    'Service "%s" retained PID %d after restart.' % (name, pid))
-            pid = new_pid
-            mappings.append(self.map(pid))
-            process.restart()
-        logging.debug('Complete mappings dump for process %s:\n%s',
-                      name, pprint.pformat(mappings, 4))
-
-        initial_map = mappings[0]
-        for i, mapping in enumerate(mappings[1:]):
-            logging.debug('Iteration %d', i)
-            for key in mapping.iterkeys():
-                # Set default case result to fail, pass when an address change
-                # occurs.
-                if not test_result['cases'].has_key(key):
-                    test_result['cases'][key] = dict([('pass', False),
-                            ('number', 0),
-                            ('total', self._TEST_ITERATION_COUNT)])
-                was_same = (initial_map.has_key(key) and
-                        initial_map[key].get_start() ==
-                        mapping[key].get_start())
-                if was_same:
-                    logging.debug("Bad: %s address didn't change", key)
-                else:
-                    logging.debug('Good: %s address changed', key)
-                    test_result['cases'][key]['number'] += 1
-                    test_result['cases'][key]['pass'] = True
-        for case, result in test_result['cases'].iteritems():
-            if result['pass']:
-                test_result['results'].append( '[PASS] Address for %s '
-                        'successfully changed' % case)
-            else:
-                test_result['results'].append('[FAIL] Address for %s had '
-                        'deterministic value: %s' % (case,
-                        mappings[0][case].get_start()))
-            test_result['pass'] = test_result['pass'] and result['pass']
-        return test_result
-
-
-    def map(self, pid):
-        """Creates data structure from table in /proc/<pid>/maps.
-
-        Gets all data from /proc/<pid>/maps, parses each entry, and saves
-        entries corresponding to executable, stack, or heap memory into
-        a dictionary.
-
-        @param pid: a string containing the pid to be tested.
-
-        Returns:
-            A dict mapping names to mapping objects (see above for mapping
-            definition).
-        """
-        memory_map = dict()
-        maps_file = open("/proc/%s/maps" % pid)
-        for maps_line in maps_file:
-            result = self.parse_result(maps_line)
-            if result is None:
-                continue
-            name = result['name']
-            start = result['start']
-            perms = result['perms']
-            is_memory = name == '[heap]' or name == '[stack]'
-            is_useful = re.search('x', perms) is not None or is_memory
-            if not is_useful:
-                continue
-            if not name in memory_map:
-                memory_map[name] = Mapping(name, start)
-            elif memory_map[name].get_start() < start:
-                memory_map[name].set_start(start)
-        return memory_map
-
-
-    def parse_result(self, result):
-        """Builds dictionary from columns of a line of /proc/<pid>/maps
-
-        Uses regular expressions to determine column separations. Puts
-        column data into a dict mapping column names to their string values.
-
-        @param result: one line of /proc/<pid>/maps as a string, for any <pid>.
-
-        Returns:
-            None if the regular expression wasn't matched. Otherwise:
-            A dict of string column names mapped to their string values.
-            For example:
-
-        {'start': '9e981700000', 'end': '9e981800000', 'perms': 'rwxp',
-            'something': '00000000', 'major': '00', 'minor': '00', 'inode':
-            '00'}
-        """
-        # Build regex to parse one line of proc maps table.
-        memory = r'(?P<start>\w+)-(?P<end>\w+)'
-        perms = r'(?P<perms>(r|-)(w|-)(x|-)(s|p))'
-        something = r'(?P<something>\w+)'
-        devices = r'(?P<major>\w+):(?P<minor>\w+)'
-        inode = r'(?P<inode>[0-9]+)'
-        name = r'(?P<name>([a-zA-Z0-9/]+|\[heap\]|\[stack\]))'
-        regex = r'%s +%s +%s +%s +%s +%s' % (memory, perms, something,
-            devices, inode, name)
-        found_match = re.match(regex, result)
-        if found_match is None:
-            return None
-        parsed_result = found_match.groupdict()
-        return parsed_result
-
-
-    def run_once(self):
-        """Main function.
-
-        Called when test is run. Gets processes to test and calls test on
-        them.
-
-        Raises:
-            error.TestFail if any processes' memory mapping addresses are the
-            same after restarting.
-        """
-
-        if self.running_on_asan():
-            logging.warning("security_ASLR is not available on ASan.")
-            return
-
-        processes = self.get_processes_to_test()
-        # If we don't find any of the processes we wanted to test, we fail.
-        if len(processes) == 0:
-            proc_names = ", ".join([p.get_name() for p in self._PROCESS_LIST])
-            raise error.TestFail(
-                'Could not find any of "%s" processes to test' % proc_names)
-
-        aslr_enabled = True
-        full_results = dict()
-        for process in processes:
-            test_results = self.test_randomization(process)
-            full_results[process.get_name()] = test_results['results']
-            if not test_results['pass']:
-                aslr_enabled = False
-
-        logging.debug('SUMMARY:')
-        for process_name, results in full_results.iteritems():
-            logging.debug('Results for %s:', process_name)
-            for result in results:
-                logging.debug(result)
-
-        if not aslr_enabled:
-            raise error.TestFail('One or more processes had deterministic '
-                    'memory mappings')
diff --git a/client/site_tests/security_Minijail0/control b/client/site_tests/security_Minijail0/control
deleted file mode 100644
index b1378b6..0000000
--- a/client/site_tests/security_Minijail0/control
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-TIME="SHORT"
-AUTHOR = "The Chromium OS Authors"
-DOC = """
-Tests features of the minijail0 tool.
-"""
-NAME = "security_Minijail0"
-PURPOSE = "Regression and integration tests of minijail0."
-CRITERIA = """
-Fail if any of the minijail0 features fail to isolate properly.
-"""
-ATTRIBUTES = "suite:bvt-inline, suite:smoke"
-TEST_CLASS = "security"
-TEST_CATEGORY = "Functional"
-TEST_TYPE = "client"
-JOB_RETRIES = 2
-
-job.run_test("security_Minijail0")
diff --git a/client/site_tests/security_Minijail0/security_Minijail0.py b/client/site_tests/security_Minijail0/security_Minijail0.py
deleted file mode 100644
index a009d10..0000000
--- a/client/site_tests/security_Minijail0/security_Minijail0.py
+++ /dev/null
@@ -1,130 +0,0 @@
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import glob
-import logging
-import os
-import re
-import shutil
-import tempfile
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-
-class security_Minijail0(test.test):
-    """Tests features of the minijail0 tool."""
-    version = 1
-
-
-    def is_64bit(self):
-        return os.path.isdir('/lib64')
-
-
-    def get_test_option(self, handle, name):
-        setup = ''
-        for l in handle.readlines():
-            m = re.match('^# %s: (.*)' % name, l.strip())
-            if m:
-                setup = m.group(1)
-        return setup
-
-
-    def run_test(self, path, static):
-        # Tests are shell scripts with a magic comment line of the form '# args:
-        # <stuff>' in them. The <stuff> is substituted in here as minijail0
-        # arguments. They can also optionally contain a magic comment of the
-        # form '# setup: <stuff>', in which case <stuff> is executed as a shell
-        # command before running the test.
-        # Another optional magic comment of the form '# expected_ugid <uid>
-        # <gid>' is used when entering a new user namespace, where <uid> and
-        # <gid> are the expected uid and gid 'outside' the user namespace. If
-        # expected_ugid is set, a temporary directory is created, and a
-        # temporary file is passed to tests as first argument. Tests should
-        # 'touch' that file and its uid/gid will be checked outside the user
-        # namespace.
-        #
-        # If '%T' is present in either of the above magic comments, a temporary
-        # directory is created, and its name is substituted for '%T' in both of
-        # them.
-        # If '%S' is present in either of the above magic comments, it is
-        # replaced with src folder of these tests.
-        args = self.get_test_option(file(path), 'args')
-        setup = self.get_test_option(file(path), 'setup')
-        args64 = self.get_test_option(file(path), 'args64')
-        args32 = self.get_test_option(file(path), 'args32')
-        expugid = self.get_test_option(file(path), 'expected_ugid').split(" ")
-
-        td = None
-        if setup:
-            if '%T' in setup:
-                td = tempfile.mkdtemp()
-                setup = setup.replace('%T', td)
-            if '%S' in setup:
-                setup = setup.replace('%S', self.srcdir)
-            utils.system(setup)
-
-        if self.is_64bit() and args64:
-            args = args + ' ' + args64
-
-        if (not self.is_64bit()) and args32:
-            args = args + ' ' + args32
-
-        if '%T' in args:
-            td = td or tempfile.mkdtemp()
-            args = args.replace('%T', td)
-        if '%S' in args:
-            args = args.replace('%S', self.srcdir)
-
-        userns_td = None
-        userns_file = ''
-        if len(expugid) == 2:
-            expuid, expgid = expugid
-            userns_td = tempfile.mkdtemp()
-            os.chmod(userns_td, 0777)
-            userns_file = userns_td + '/userns'
-
-        if static:
-            ret = utils.system('/sbin/minijail0 %s %s/staticbashexec %s %s'
-                                % (args, self.srcdir, path, userns_file),
-                                ignore_status=True)
-        else:
-            ret = utils.system('/sbin/minijail0 %s /bin/bash %s %s'
-                                % (args, path, userns_file),
-                                ignore_status=True)
-        if ret == 0 and len(expugid) == 2:
-            stat = os.stat(userns_file)
-            if str(stat.st_uid) != expuid or str(stat.st_gid) != expgid:
-                ret = 1
-
-        if td:
-            # The test better not have polluted our mount namespace :).
-            shutil.rmtree(td)
-        if userns_td:
-            shutil.rmtree(userns_td)
-        return ret
-
-
-    def setup(self):
-        os.chdir(self.srcdir)
-        utils.make()
-
-
-    def run_once(self):
-        failed = []
-        ran = 0
-        for p in glob.glob('%s/test-*' % self.srcdir):
-            name = os.path.basename(p)
-            logging.info('Running: %s', name)
-            if self.run_test(p, static=False):
-                failed.append(name)
-            ran += 1
-            if self.run_test(p, static=True):
-                failed.append(name + ' static')
-            ran += 1
-
-        if ran == 0:
-            failed.append("No tests found in %s!" % (self.srcdir))
-        if failed:
-            logging.error('Failed: %s', failed)
-            raise error.TestFail('Failed: %s' % failed)
diff --git a/client/site_tests/security_Minijail0/src/Makefile b/client/site_tests/security_Minijail0/src/Makefile
deleted file mode 100644
index 47b4c5b..0000000
--- a/client/site_tests/security_Minijail0/src/Makefile
+++ /dev/null
@@ -1,6 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-staticbashexec: staticbashexec.c
-	$(CC) staticbashexec.c -o staticbashexec $(CFLAGS) $(CPPFLAGS) $(LDFLAGS) -static
diff --git a/client/site_tests/security_Minijail0/src/common.sh b/client/site_tests/security_Minijail0/src/common.sh
deleted file mode 100644
index f0aa73c..0000000
--- a/client/site_tests/security_Minijail0/src/common.sh
+++ /dev/null
@@ -1,39 +0,0 @@
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-die () {
-  echo "$@"
-  exit 1
-}
-
-needuid () {
-  uid=$(id -ru)
-  [ "$uid" != "$1" ] && die "uid $uid != $1"
-}
-
-needeuid () {
-  euid=$(id -u)
-  [ "$euid" != "$1" ] && die "euid $euid != $1"
-}
-
-needgid () {
-  gid=$(id -rg)
-  [ "$gid" != "$1" ] && die "gid $gid != $1"
-}
-
-
-needegid () {
-  egid=$(id -g)
-  [ "$egid" != "$1" ] && die "egid $egid != $1"
-}
-
-needreuid () {
-  needuid "$1"
-  needeuid "$1"
-}
-
-needregid () {
-  needgid "$1"
-  needegid "$1"
-}
diff --git a/client/site_tests/security_Minijail0/src/mountns-enter-child.py b/client/site_tests/security_Minijail0/src/mountns-enter-child.py
deleted file mode 100644
index dd98d9f..0000000
--- a/client/site_tests/security_Minijail0/src/mountns-enter-child.py
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-import sys
-
-# Parent passes test file path as first argument.
-test_file = sys.argv[1]
-
-# We entered a mount namespace where |test_file| should not be accessible.
-if os.access(test_file, os.F_OK):
-    sys.exit(1)
diff --git a/client/site_tests/security_Minijail0/src/mountns-enter.py b/client/site_tests/security_Minijail0/src/mountns-enter.py
deleted file mode 100644
index aa0aeb8..0000000
--- a/client/site_tests/security_Minijail0/src/mountns-enter.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-import subprocess
-import sys
-import tempfile
-
-# Parent passes base path as first argument.
-child_path = os.path.join(sys.argv[1], "mountns-enter-child.py")
-
-# Mount tmpfs.
-tmpdir = tempfile.mkdtemp(prefix="newns-", dir="/tmp")
-ret = subprocess.check_call(["mount", "tmpfs", tmpdir, "-t", "tmpfs"])
-test_file = os.path.join(tmpdir, "test")
-with open(test_file, "w") as t:
-    print >> t, "test"
-
-# Exec child and enter existing mount namespace.
-ret = subprocess.call(["/sbin/minijail0", "-V", "/proc/1/ns/mnt", "--",
-                       sys.executable, child_path, test_file])
-
-# Clean up.
-subprocess.check_call("umount %s" % tmpdir, shell=True)
-os.rmdir(tmpdir)
-
-# Return child's exit status.
-sys.exit(ret)
diff --git a/client/site_tests/security_Minijail0/src/staticbashexec.c b/client/site_tests/security_Minijail0/src/staticbashexec.c
deleted file mode 100644
index a558839..0000000
--- a/client/site_tests/security_Minijail0/src/staticbashexec.c
+++ /dev/null
@@ -1,12 +0,0 @@
-/*
- * Copyright 2014 The Chromium OS Authors. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file.
- */
-#include <stdio.h>
-#include <unistd.h>
-
-int main(int argc, char **argv)
-{
-	return execv("/bin/bash", argv);
-}
diff --git a/client/site_tests/security_Minijail0/src/test-caps b/client/site_tests/security_Minijail0/src/test-caps
deleted file mode 100644
index e83c390..0000000
--- a/client/site_tests/security_Minijail0/src/test-caps
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/bin/sh
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-# args: -u 1000 -g 1000 -c 2 --ambient
-# note that 2 = CAP_DAC_OVERRIDE
-
-. "$(dirname "$0")"/common.sh
-
-needreuid 1000
-needregid 1000
-
-# Test we've kept CAP_DAC_OVERRIDE.
-[ ! -w "$0" ] && die "Can't write to '$0'"
-
-# Test we've lost everything else.
-mismatch=""
-matched=0
-while read line; do
-    if echo "$line" | grep -q '^CapEff:'; then
-        caps=$(echo "$line" | awk '{print $NF}')
-        if echo "$caps" | grep -q '^0000000000000002$'; then
-            matched=1
-        else
-            mismatch="$caps"
-        fi
-    fi
-done </proc/self/status
-[ $matched -eq 1 ] || die "Did not drop non-CAP_DAC_OVERRIDE caps: $mismatch"
-
-exit 0
diff --git a/client/site_tests/security_Minijail0/src/test-chroot b/client/site_tests/security_Minijail0/src/test-chroot
deleted file mode 100644
index 8b56091..0000000
--- a/client/site_tests/security_Minijail0/src/test-chroot
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/bin/sh
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-# setup: mkdir -p "%T/c/bin" "%T/c/lib64" "%T/c/lib" "%T/c/usr/lib" "%T/c/usr/local" "%T/c/tmp-rw" "%T/c/tmp-ro" "%T/tmp"
-# args: -b /bin,/bin -b /lib,/lib -b /usr/lib,/usr/lib -b /usr/local,/usr/local -b %T/tmp,/tmp-rw,1 -b %T/tmp,/tmp-ro -C "%T/c" -v
-# args64: -b /lib64,/lib64
-
-# Can't get at common.sh from here... oops :)
-die () {
-  echo "$@"
-  exit 1
-}
-
-wd=$(pwd)
-[ "$wd" != "/" ] && die "not in /"
-[ ! -d /lib ] && die "no /lib"
-[ ! -d /tmp-rw ] && die "no /tmp-rw"
-[ ! -d /tmp-ro ] && die "no /tmp-ro"
-echo 'x' > /tmp-rw/test-rw || die "non-writeable /tmp-rw"
-echo 'x' > /tmp-ro/test-ro && die "writeable /tmp-ro"
-exit 0
diff --git a/client/site_tests/security_Minijail0/src/test-create-mount-destination b/client/site_tests/security_Minijail0/src/test-create-mount-destination
deleted file mode 100644
index 8bc3963..0000000
--- a/client/site_tests/security_Minijail0/src/test-create-mount-destination
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/bin/sh
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-# args: -v -C / -k tmpfs,%T,tmpfs -b /dev/null,%T/test_null
-
-SRCDIR="$(dirname "$0")"
-
-. "${SRCDIR}"/common.sh
-
-# Check that the "test_null" bind mount has been created.
-grep "test_null" /proc/mounts || die "test_null not mounted"
-exit 0
-
diff --git a/client/site_tests/security_Minijail0/src/test-gid b/client/site_tests/security_Minijail0/src/test-gid
deleted file mode 100644
index aed1bb3..0000000
--- a/client/site_tests/security_Minijail0/src/test-gid
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/bin/sh
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-# args: -g 1000
-
-. "$(dirname "$0")"/common.sh
-
-needregid 1000
-exit 0
diff --git a/client/site_tests/security_Minijail0/src/test-group b/client/site_tests/security_Minijail0/src/test-group
deleted file mode 100644
index ea18ecf..0000000
--- a/client/site_tests/security_Minijail0/src/test-group
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/bin/sh
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-# args: -g chronos
-
-. "$(dirname "$0")"/common.sh
-
-needregid 1000
-exit 0
diff --git a/client/site_tests/security_Minijail0/src/test-init b/client/site_tests/security_Minijail0/src/test-init
deleted file mode 100644
index fc52dda..0000000
--- a/client/site_tests/security_Minijail0/src/test-init
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/bin/sh
-# Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-# args: -I
-
-. "$(dirname "$0")"/common.sh
-
-[ "$$" != "1" ] && die "not running as init"
-exit 0
diff --git a/client/site_tests/security_Minijail0/src/test-mount-tmpfs b/client/site_tests/security_Minijail0/src/test-mount-tmpfs
deleted file mode 100644
index 7966701..0000000
--- a/client/site_tests/security_Minijail0/src/test-mount-tmpfs
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/bin/sh
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-# args: -v -C / -k tmpfs,%T,tmpfs,0x1,uid=5446
-
-SRCDIR="$(dirname "$0")"
-
-. "${SRCDIR}"/common.sh
-
-# check that the tmpfs mount has been made in this mount namespace
-grep "tmpfs.*ro.*uid=5446" /proc/mounts || die "tmpfs not mounted with data"
-exit 0
-
diff --git a/client/site_tests/security_Minijail0/src/test-mountns-enter b/client/site_tests/security_Minijail0/src/test-mountns-enter
deleted file mode 100644
index c52adb7..0000000
--- a/client/site_tests/security_Minijail0/src/test-mountns-enter
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/sh
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-# args: -v
-
-SRCDIR="$(dirname "$0")"
-
-. "${SRCDIR}"/common.sh
-
-python "${SRCDIR}"/mountns-enter.py "${SRCDIR}" || die "enter mount ns"
diff --git a/client/site_tests/security_Minijail0/src/test-netns b/client/site_tests/security_Minijail0/src/test-netns
deleted file mode 100644
index d069eb7..0000000
--- a/client/site_tests/security_Minijail0/src/test-netns
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/bin/sh
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-# args: -e
-
-. "$(dirname "$0")"/common.sh
-
-# Look in /proc/net/dev so we get even downed devices.
-lines=$(wc -l < /proc/net/dev)
-
-# Inter-|   Receive                                                |  Transmit
-#  face |bytes    packets errs drop fifo frame compressed multicast|bytes packets errs drop fifo colls carrier compressed
-#    lo:       0       0    0    0    0     0          0         0        0	0    0    0    0     0       0          0
-
-[ $lines -eq 3 ] || die "network interfaces still here"
diff --git a/client/site_tests/security_Minijail0/src/test-pid-file b/client/site_tests/security_Minijail0/src/test-pid-file
deleted file mode 100644
index 1a6e2e2..0000000
--- a/client/site_tests/security_Minijail0/src/test-pid-file
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/bin/sh
-# Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-# setup: mkdir -p "%T/c/bin" "%T/c/lib64" "%T/c/lib" "%T/c/usr/lib" "%T/c/usr/local"
-# args: -b /bin,/bin -b /lib,/lib -b /usr/lib,/usr/lib -b /usr/local,/usr/local -C "%T/c" -f "%T/c/pidfile"
-# args64: -b /lib64,/lib64
-
-read pid < pidfile
-[ "$$" -eq "$pid" ]
diff --git a/client/site_tests/security_Minijail0/src/test-pidns b/client/site_tests/security_Minijail0/src/test-pidns
deleted file mode 100644
index e773312..0000000
--- a/client/site_tests/security_Minijail0/src/test-pidns
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/bin/sh
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-# args: -p
-
-. "$(dirname "$0")"/common.sh
-
-[ "$$" != "2" ] && die "not in a new pid namespace"
-exit 0
diff --git a/client/site_tests/security_Minijail0/src/test-pivotroot b/client/site_tests/security_Minijail0/src/test-pivotroot
deleted file mode 100644
index 99a806c..0000000
--- a/client/site_tests/security_Minijail0/src/test-pivotroot
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/bin/sh
-# Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-# setup: mkdir -p "%T/c/bin" "%T/c/lib64" "%T/c/lib" "%T/c/usr/lib" "%T/c/usr/local" "%T/c/tmp-rw" "%T/c/tmp-ro" "%T/tmp"
-# args: -b /bin,/bin -b /lib,/lib -b /usr/lib,/usr/lib -b /usr/local,/usr/local -b %T/tmp,/tmp-rw,1 -b %T/tmp,/tmp-ro -P "%T/c" -v
-# args64: -b /lib64,/lib64
-
-# Can't get at common.sh from here... oops :)
-die () {
-  echo "$@"
-  exit 1
-}
-
-wd=$(pwd)
-[ "$wd" != "/" ] && die "not in /"
-[ ! -d /lib ] && die "no /lib"
-[ ! -d /tmp-rw ] && die "no /tmp-rw"
-[ ! -d /tmp-ro ] && die "no /tmp-ro"
-echo 'x' > /tmp-rw/test-rw || die "non-writeable /tmp-rw"
-echo 'x' > /tmp-ro/test-ro && die "writeable /tmp-ro"
-exit 0
diff --git a/client/site_tests/security_Minijail0/src/test-remount b/client/site_tests/security_Minijail0/src/test-remount
deleted file mode 100644
index 7ffeb6e..0000000
--- a/client/site_tests/security_Minijail0/src/test-remount
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/sh
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-# args: -r
-
-. "$(dirname "$0")"/common.sh
-
-file="/proc/sys/kernel/printk"
-[ -w "$file" ] && die "can write to $file"
-exit 0
diff --git a/client/site_tests/security_Minijail0/src/test-rlimits b/client/site_tests/security_Minijail0/src/test-rlimits
deleted file mode 100644
index c2ddc0d..0000000
--- a/client/site_tests/security_Minijail0/src/test-rlimits
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/bin/sh
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-# args: -R 13,10,11
-# note that 13 = RLIMIT_NICE
-
-SRCDIR="$(dirname "$0")"
-
-. "${SRCDIR}"/common.sh
-
-grep "Max nice priority\s*10\s*11" /proc/self/limits || die "nice limit not set"
-exit 0
diff --git a/client/site_tests/security_Minijail0/src/test-tmpfs b/client/site_tests/security_Minijail0/src/test-tmpfs
deleted file mode 100644
index 47a44ef..0000000
--- a/client/site_tests/security_Minijail0/src/test-tmpfs
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/bin/sh
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-# setup: mkdir -p "%T/c/bin" "%T/c/lib" "%T/c/lib64" "%T/c/usr/lib" "%T/c/usr/local" "%T/c/usr/bin" "%T/c/tmp"
-# args: -b /bin,/bin -b /lib,/lib -b /usr/lib,/usr/lib -b /usr/bin,/usr/bin -b /usr/local,/usr/local -C "%T/c" -t -v
-# args64: -b /lib64,/lib64
-
-# Can't get at common.sh from here... oops :)
-die () {
-  echo "$@"
-  exit 1
-}
-
-fs=$(stat -f /tmp -c %T)
-[ "$fs" != "tmpfs" ] && die "tmpfs"
-exit 0
diff --git a/client/site_tests/security_Minijail0/src/test-uid b/client/site_tests/security_Minijail0/src/test-uid
deleted file mode 100644
index 00d84bf..0000000
--- a/client/site_tests/security_Minijail0/src/test-uid
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/bin/sh
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-# args: -u 1000
-
-. "$(dirname "$0")"/common.sh
-
-needreuid 1000
-exit 0
diff --git a/client/site_tests/security_Minijail0/src/test-user b/client/site_tests/security_Minijail0/src/test-user
deleted file mode 100644
index 7a174e3..0000000
--- a/client/site_tests/security_Minijail0/src/test-user
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/bin/sh
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-# args: -u chronos
-
-. "$(dirname "$0")"/common.sh
-
-needreuid 1000
-exit 0
diff --git a/client/site_tests/security_Minijail0/src/test-usergroups b/client/site_tests/security_Minijail0/src/test-usergroups
deleted file mode 100644
index a1bccae..0000000
--- a/client/site_tests/security_Minijail0/src/test-usergroups
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/bin/sh
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-# args: -u chronos -g chronos -G
-
-needgroup=audio # a group chronos is in and root isn't
-neednogroup=root # we'd better not still be in root...
-
-. "$(dirname "$0")"/common.sh
-
-groups=$(groups)
-if ! echo "$groups" | grep -Eq "\b$needgroup\b"; then
-  die "$needgroup not in $groups"
-fi
-if echo "$groups" | grep -Eq "\b$neednogroup\b"; then
-  die "$neednogroup in $groups";
-fi
-exit 0
diff --git a/client/site_tests/security_Minijail0/src/test-userns b/client/site_tests/security_Minijail0/src/test-userns
deleted file mode 100644
index 5cf4b36..0000000
--- a/client/site_tests/security_Minijail0/src/test-userns
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/bin/sh
-# Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-# args: -U
-# expected_ugid: 0 0
-
-. "$(dirname "$0")"/common.sh
-
-needreuid 65534
-needregid 65534
-touch $1
-exit 0
diff --git a/client/site_tests/security_Minijail0/src/test-userns-gid b/client/site_tests/security_Minijail0/src/test-userns-gid
deleted file mode 100644
index 167a4ed..0000000
--- a/client/site_tests/security_Minijail0/src/test-userns-gid
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/bin/sh
-# Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-# args: -M"0 1000 1"
-# expected_ugid: 0 1000
-
-. "$(dirname "$0")"/common.sh
-
-needreuid 65534
-needregid 0
-touch $1
-exit 0
diff --git a/client/site_tests/security_Minijail0/src/test-userns-init b/client/site_tests/security_Minijail0/src/test-userns-init
deleted file mode 100644
index e699983..0000000
--- a/client/site_tests/security_Minijail0/src/test-userns-init
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/bin/sh
-# Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-# args: -m"0 1000 1" -M"0 1000 1" -I
-
-. "$(dirname "$0")"/common.sh
-
-[ "$$" != "1" ] && die "not running as init"
-exit 0
diff --git a/client/site_tests/security_Minijail0/src/test-userns-netns b/client/site_tests/security_Minijail0/src/test-userns-netns
deleted file mode 100644
index f38522d..0000000
--- a/client/site_tests/security_Minijail0/src/test-userns-netns
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/bin/sh
-# Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-# args: -m"0 1000 1" -M"0 1000 1" -e
-
-. "$(dirname "$0")"/common.sh
-
-# Look in /proc/net/dev so we get even downed devices.
-lines=$(wc -l < /proc/net/dev)
-
-# Inter-|   Receive                                                |  Transmit
-#  face |bytes    packets errs drop fifo frame compressed multicast|bytes packets errs drop fifo colls carrier compressed
-#    lo:       0       0    0    0    0     0          0         0        0	0    0    0    0     0       0          0
-
-[ $lines -eq 3 ] || die "network interfaces still here"
diff --git a/client/site_tests/security_Minijail0/src/test-userns-pidns b/client/site_tests/security_Minijail0/src/test-userns-pidns
deleted file mode 100644
index 2367eaf..0000000
--- a/client/site_tests/security_Minijail0/src/test-userns-pidns
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/bin/sh
-# Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-# args: -m"0 1000 1" -M"0 1000 1" -p
-
-. "$(dirname "$0")"/common.sh
-
-[ "$$" != "2" ] && die "not in a new pid namespace"
-exit 0
diff --git a/client/site_tests/security_Minijail0/src/test-userns-ugid b/client/site_tests/security_Minijail0/src/test-userns-ugid
deleted file mode 100644
index 26c3c43..0000000
--- a/client/site_tests/security_Minijail0/src/test-userns-ugid
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/bin/sh
-# Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-# args: -m"0 1000 1" -M"0 1000 1"
-# expected_ugid: 1000 1000
-
-. "$(dirname "$0")"/common.sh
-
-needreuid 0
-needregid 0
-touch $1
-exit 0
diff --git a/client/site_tests/security_Minijail0/src/test-userns-uid b/client/site_tests/security_Minijail0/src/test-userns-uid
deleted file mode 100644
index 99c2158..0000000
--- a/client/site_tests/security_Minijail0/src/test-userns-uid
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/bin/sh
-# Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-# args: -m"0 1000 1"
-# expected_ugid: 1000 0
-
-. "$(dirname "$0")"/common.sh
-
-needreuid 0
-needregid 65534
-touch $1
-exit 0
diff --git a/client/site_tests/security_Minijail_seccomp/control b/client/site_tests/security_Minijail_seccomp/control
deleted file mode 100644
index 674a22e..0000000
--- a/client/site_tests/security_Minijail_seccomp/control
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-TIME="SHORT"
-AUTHOR = "The Chromium OS Authors"
-DOC = """
-Tests seccomp_filter setup in minijail.
-"""
-NAME = "security_Minijail_seccomp"
-PURPOSE = "Regression test for seccomp_filter setup from minijail."
-CRITERIA = """
-Fail if either seccomp_filter fails to block syscalls or if allowed syscalls
-are blocked incorrectly.
-"""
-TEST_CLASS = "security"
-TEST_CATEGORY = "Functional"
-TEST_TYPE = "client"
-JOB_RETRIES = 2
-ATTRIBUTES = "suite:bvt-inline, suite:smoke"
-
-job.run_test("security_Minijail_seccomp")
diff --git a/client/site_tests/security_Minijail_seccomp/policy b/client/site_tests/security_Minijail_seccomp/policy
deleted file mode 100644
index 0672a42..0000000
--- a/client/site_tests/security_Minijail_seccomp/policy
+++ /dev/null
@@ -1,9 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-openat: 1
-read: 1
-close: 1
-exit: 1
-exit_group: 1
diff --git a/client/site_tests/security_Minijail_seccomp/policy-privdrop_arm b/client/site_tests/security_Minijail_seccomp/policy-privdrop_arm
deleted file mode 100644
index c85ad70..0000000
--- a/client/site_tests/security_Minijail_seccomp/policy-privdrop_arm
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-openat: 1
-read: 1
-close: 1
-exit: 1
-exit_group: 1
-# Privilege-dropping syscalls.
-setgroups32: 1
-setresuid32: 1
-setresgid32: 1
diff --git a/client/site_tests/security_Minijail_seccomp/policy-privdrop_arm64 b/client/site_tests/security_Minijail_seccomp/policy-privdrop_arm64
deleted file mode 100644
index c00d085..0000000
--- a/client/site_tests/security_Minijail_seccomp/policy-privdrop_arm64
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-openat: 1
-read: 1
-close: 1
-exit: 1
-exit_group: 1
-# Privilege-dropping syscalls.
-setgroups: 1
-setresuid: 1
-setresgid: 1
diff --git a/client/site_tests/security_Minijail_seccomp/policy-privdrop_i386 b/client/site_tests/security_Minijail_seccomp/policy-privdrop_i386
deleted file mode 100644
index f02b7b6..0000000
--- a/client/site_tests/security_Minijail_seccomp/policy-privdrop_i386
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-openat: 1
-read: 1
-close: 1
-exit: 1
-exit_group: 1
-# Privilege-dropping syscalls.
-setgroups32: 1
-setresgid32: 1
-setresuid32: 1
diff --git a/client/site_tests/security_Minijail_seccomp/policy-privdrop_x86_64 b/client/site_tests/security_Minijail_seccomp/policy-privdrop_x86_64
deleted file mode 100644
index 7e83481..0000000
--- a/client/site_tests/security_Minijail_seccomp/policy-privdrop_x86_64
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-openat: 1
-read: 1
-close: 1
-exit: 1
-exit_group: 1
-# Privilege-dropping syscalls.
-setgroups: 1
-setresgid: 1
-setresuid: 1
diff --git a/client/site_tests/security_Minijail_seccomp/policy-rdonly b/client/site_tests/security_Minijail_seccomp/policy-rdonly
deleted file mode 100644
index 54c2c89..0000000
--- a/client/site_tests/security_Minijail_seccomp/policy-rdonly
+++ /dev/null
@@ -1,10 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# O_RDONLY
-openat: arg2 == 0
-read: 1
-close: 1
-exit: 1
-exit_group: 1
diff --git a/client/site_tests/security_Minijail_seccomp/policy-wronly b/client/site_tests/security_Minijail_seccomp/policy-wronly
deleted file mode 100644
index fa859a7..0000000
--- a/client/site_tests/security_Minijail_seccomp/policy-wronly
+++ /dev/null
@@ -1,10 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# O_WRONLY
-openat: arg2 & 01
-read: 1
-close: 1
-exit: 1
-exit_group: 1
diff --git a/client/site_tests/security_Minijail_seccomp/security_Minijail_seccomp.py b/client/site_tests/security_Minijail_seccomp/security_Minijail_seccomp.py
deleted file mode 100644
index 1d211e7..0000000
--- a/client/site_tests/security_Minijail_seccomp/security_Minijail_seccomp.py
+++ /dev/null
@@ -1,84 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-
-from collections import namedtuple
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-
-Jail = namedtuple("Jail", "user policy nnp")
-
-class security_Minijail_seccomp(test.test):
-    version = 1
-
-
-    def setup(self):
-        os.chdir(self.srcdir)
-        utils.make('clean')
-        utils.make()
-
-
-    def run_test(self, exe, args, jail, expected_ret, pretty_msg):
-        cmdline = '/sbin/minijail0'
-
-        if jail.user:
-            cmdline += ' -u %s' % jail.user
-
-        if jail.nnp:
-            cmdline += ' -n'
-
-        cmdline += ' -S %s/%s %s/%s' % (self.bindir, jail.policy,
-                                        self.bindir, exe)
-
-        if len(args) > 0:
-            cmdline += ' %s' % ' '.join(args)
-
-        logging.info("Command line: " + cmdline)
-        ret = utils.system(cmdline, ignore_status=True)
-
-        if ret != expected_ret:
-            logging.error("ret: %d, expected: %d" % (ret, expected_ret))
-            raise error.TestFail(pretty_msg)
-
-
-    def run_once(self):
-        privdrop_policy = "policy-privdrop_" + utils.get_arch_userspace()
-
-        case_ok = ("ok", [],
-                   Jail(None, "policy", nnp=False),
-                   0, "Allowed system calls failed")
-        case_block_privdrop = ("ok", [],
-                               Jail("chronos", "policy", nnp=False),
-                               253, "Blocked priv-drop system calls succeeded")
-        case_allow_privdrop = ("ok", [],
-                               Jail("chronos", privdrop_policy, nnp=False),
-                               0, "Allowed system calls failed")
-        case_no_new_privs = ("ok", [],
-                             Jail("chronos", "policy", nnp=True),
-                             0, "Allowed system calls failed")
-        case_fail = ("fail", [],
-                     Jail(None, "policy", nnp=False),
-                     253, "Blocked system calls succeeded")
-
-        case_arg_equals_ok = ("open", ["0"],
-                              Jail(None, "policy-rdonly", nnp=False),
-                              0, "Allowing system calls via args == failed")
-        case_arg_equals_fail = ("open", ["1"],
-                                Jail(None, "policy-rdonly", nnp=False),
-                                253, "Blocking system calls via args == failed")
-        case_arg_flags_ok = ("open", ["1"],
-                             Jail(None, "policy-wronly", nnp=False),
-                             0, "Allowing system calls via args & failed")
-        case_arg_flags_ok = ("open", ["2"],
-                             Jail(None, "policy-wronly", nnp=False),
-                             253, "Blocking system calls via args & failed")
-
-        for case in [case_ok, case_block_privdrop, case_allow_privdrop,
-                     case_no_new_privs, case_fail,
-                     case_arg_equals_ok, case_arg_equals_fail,
-                     case_arg_flags_ok]:
-            self.run_test(*case)
diff --git a/client/site_tests/security_Minijail_seccomp/src/Makefile b/client/site_tests/security_Minijail_seccomp/src/Makefile
deleted file mode 100644
index 3c829c0..0000000
--- a/client/site_tests/security_Minijail_seccomp/src/Makefile
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-CFLAGS =
-LDFLAGS =
-
-SRC_OK = ok.c
-SRC_FAIL = fail.c
-SRC_OPEN = open.c
-
-TARGET_OK = ../ok
-TARGET_FAIL = ../fail
-TARGET_OPEN = ../open
-
-all: $(TARGET_OK) $(TARGET_FAIL) $(TARGET_OPEN)
-
-$(TARGET_OK): $(SRC_OK)
-	$(CC) $(CFLAGS) -o $@ $^ $(LDFLAGS)
-
-$(TARGET_FAIL): $(SRC_FAIL)
-	$(CC) $(CFLAGS) -o $@ $^ $(LDFLAGS)
-
-$(TARGET_OPEN): $(SRC_OPEN)
-	$(CC) $(CFLAGS) -o $@ $^ $(LDFLAGS)
-
-clean:
-	$(RM) $(TARGET_OK)
-	$(RM) $(TARGET_FAIL)
-	$(RM) $(TARGET_OPEN)
diff --git a/client/site_tests/security_Minijail_seccomp/src/fail.c b/client/site_tests/security_Minijail_seccomp/src/fail.c
deleted file mode 100644
index 882c7e3..0000000
--- a/client/site_tests/security_Minijail_seccomp/src/fail.c
+++ /dev/null
@@ -1,21 +0,0 @@
-// Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include <fcntl.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <asm/unistd.h>
-
-#define SIZE 1024
-
-int main(int argc, char **argv) {
-  char buf[SIZE];
-  int fd_z = syscall(__NR_openat, AT_FDCWD, "/dev/zero", O_RDONLY);
-  int fd_n = syscall(__NR_openat, AT_FDCWD, "/dev/null", O_RDONLY);
-  int nr = syscall(__NR_read, fd_z, buf, SIZE);
-  int nw = syscall(__NR_write, fd_n, buf, SIZE);
-  syscall(__NR_close, fd_z);
-  syscall(__NR_close, fd_n);
-  syscall(__NR_exit, 0);
-}
diff --git a/client/site_tests/security_Minijail_seccomp/src/ok.c b/client/site_tests/security_Minijail_seccomp/src/ok.c
deleted file mode 100644
index e0fed4e..0000000
--- a/client/site_tests/security_Minijail_seccomp/src/ok.c
+++ /dev/null
@@ -1,18 +0,0 @@
-// Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include <fcntl.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <asm/unistd.h>
-
-#define SIZE 1024
-
-int main(int argc, char **argv) {
-  char buf[SIZE];
-  int fd = syscall(__NR_openat, AT_FDCWD, "/dev/zero", O_RDONLY);
-  int n = syscall(__NR_read, fd, buf, SIZE);
-  syscall(__NR_close, fd);
-  syscall(__NR_exit, 0);
-}
diff --git a/client/site_tests/security_Minijail_seccomp/src/open.c b/client/site_tests/security_Minijail_seccomp/src/open.c
deleted file mode 100644
index 0d902ba..0000000
--- a/client/site_tests/security_Minijail_seccomp/src/open.c
+++ /dev/null
@@ -1,53 +0,0 @@
-// Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include <fcntl.h>
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <asm/unistd.h>
-
-void usage(const char *comm) {
-  fprintf(stderr, "Usage: %s <access mode>\n", comm);
-  fprintf(stderr, "\tAccess mode: 0-O_RDONLY, 1-O_WRONLY, 2-O_RDWR\n");
-  return;
-}
-
-int main(int argc, char **argv) {
-  if (argc < 2) {
-    usage(argv[0]);
-    return 1;
-  }
-
-  unsigned int access_mode = strtoul(argv[1], NULL, 0);
-  if (access_mode < 0 || access_mode > 2) {
-    usage(argv[0]);
-    return 1;
-  }
-
-  char *path;
-  int flags;
-
-  switch (access_mode) {
-    case 0:
-      path = "/dev/zero";
-      flags = O_RDONLY;
-      break;
-    case 1:
-      path = "/dev/null";
-      flags = O_WRONLY;
-      break;
-    case 2:
-      path = "/dev/null";
-      flags = O_RDWR;
-      break;
-    default:
-      usage(argv[0]);
-      return 1;
-  }
-
-  int fd = syscall(__NR_openat, AT_FDCWD, path, flags);
-  syscall(__NR_close, fd);
-  syscall(__NR_exit, 0);
-}
diff --git a/client/site_tests/security_RootCA/baseline.json b/client/site_tests/security_RootCA/baseline.json
deleted file mode 100644
index 57f4d9e..0000000
--- a/client/site_tests/security_RootCA/baseline.json
+++ /dev/null
@@ -1,270 +0,0 @@
-{
-	"both": {
-	"01:0C:06:95:A6:98:19:14:FF:BF:5F:C6:B0:B6:95:EA:29:E9:12:A6":
-		"Hellenic Academic and Research Institutions RootCA 2015",
-	"02:FA:F3:E2:91:43:54:68:60:78:57:69:4D:F5:E4:5B:68:85:18:68":
-		"AddTrust External Root",
-	"03:9E:ED:B8:0B:E7:A0:3C:69:53:89:3B:20:D2:D9:32:3A:4C:2A:FD":
-		"GeoTrust Primary Certification Authority - G3",
-	"05:63:B8:63:0D:62:D7:5A:BB:C8:AB:1E:4B:DF:B5:A8:99:B2:4D:43":
-		"DigiCert Assured ID Root CA",
-	"06:08:3F:59:3F:15:A1:04:A0:69:A4:6B:A9:03:D0:06:B7:97:09:91":
-		"NetLock Arany =Class Gold= FÅ‘tanúsítvány",
-	"07:E0:32:E0:20:B7:2C:3F:19:2F:06:28:A2:59:3A:19:A7:0F:06:9E":
-		"Certum Trusted Network CA",
-	"09:3C:61:F3:8B:8B:DC:7D:55:DF:75:38:02:05:00:E1:25:F5:C8:36":
-		"QuoVadis Root CA 2 G3",
-	"0D:44:DD:8C:3C:8C:1A:1A:58:75:64:81:E9:0F:2E:2A:FF:B3:D2:6E":
-		"Amazon Root CA 3",
-	"0F:36:38:5B:81:1A:25:C3:9B:31:4E:83:CA:E9:34:66:70:CC:74:B4":
-		"GDCA TrustAUTH R5 ROOT",
-	"0F:F9:40:76:18:D3:D7:6A:4B:98:F0:A8:35:9E:0C:FD:27:AC:CC:ED":
-		"OISTE WISeKey Global Root GB CA",
-	"13:2D:0D:45:53:4B:69:97:CD:B2:D5:C3:39:E2:55:76:60:9B:5C:C6":
-		"Verisign Class 3 Public Primary Certification Authority - G3",
-	"1B:8E:EA:57:96:29:1A:C9:39:EA:B8:0A:81:1A:73:73:C0:93:79:67":
-		"QuoVadis Root CA 1 G3",
-	"1E:0E:56:19:0A:D1:8B:25:98:B2:04:44:FF:66:8A:04:17:99:5F:3F":
-		"LuxTrust Global Root 2",
-	"1F:24:C6:30:CD:A4:18:EF:20:69:FF:AD:4F:DD:5F:46:3A:1B:69:AA":
-		"GlobalSign ECC Root CA - R5",
-	"1F:49:14:F7:D8:74:95:1D:DD:AE:02:C0:BE:FD:3A:2D:82:75:51:85":
-		"QuoVadis Root CA 3",
-	"20:D8:06:40:DF:9B:25:F5:12:25:3A:11:EA:F7:59:8A:EB:14:B5:47":
-		"Entrust Root Certification Authority - EC1",
-	"22:D5:D8:DF:8F:02:31:D1:8D:F7:9D:B7:CF:8A:2D:64:C9:3F:6C:3A":
-		"VeriSign Class 3 Public Primary Certification Authority - G4",
-	"22:FD:D0:B7:FD:A2:4E:0D:AC:49:2C:A0:AC:A6:7B:6A:1F:E3:F7:66":
-		"Certplus Root CA G1",
-	"27:96:BA:E6:3F:18:01:E2:77:26:1B:A0:D7:77:70:02:8F:20:EE:E4":
-		"Go Daddy Class 2 CA",
-	"28:90:3A:63:5B:52:80:FA:E6:77:4C:0B:6D:A7:D6:BA:A6:4A:F2:E8":
-		"EC-ACC",
-	"29:36:21:02:8B:20:ED:02:F5:66:C5:32:D1:D6:ED:90:9F:45:00:2F":
-		"AffirmTrust Networking",
-	"2B:8F:1B:57:33:0D:BB:A2:D0:7A:6C:51:F7:0E:E9:0D:DA:B9:AD:8E":
-		"USERTrust RSA Certification Authority",
-	"2B:B1:F5:3E:55:0C:1D:C5:F1:D4:E6:B7:6A:46:4B:55:06:02:AC:21":
-		"Atos TrustedRoot 2011",
-	"2F:78:3D:25:52:18:A7:4A:65:39:71:B5:2C:A2:9C:45:15:6F:E9:19":
-		"Izenpe.com",
-	"31:43:64:9B:EC:CE:27:EC:ED:3A:3F:0B:8F:0D:E4:E8:91:DD:EE:CA":
-		"TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1",
-	"32:3C:11:8E:1B:F7:B8:B6:52:54:E2:E2:10:0D:D6:02:90:37:F0:96":
-		"GeoTrust Primary Certification Authority",
-	"36:79:CA:35:66:87:72:30:4D:30:A5:FB:87:3B:0F:A7:7B:B7:0D:54":
-		"VeriSign Universal Root Certification Authority",
-	"36:B1:2B:49:F9:81:9E:D7:4C:9E:BC:38:0F:C6:56:8F:5D:AC:B2:F7":
-		"Security Communication Root CA",
-	"37:9A:19:7B:41:85:45:35:0C:A6:03:69:F3:3C:2E:AF:47:4F:20:79":
-		"GeoTrust Universal CA 2",
-	"37:F7:6D:E6:07:7C:90:C5:B1:3E:93:1A:B7:41:10:B4:F2:E4:9A:27":
-		"Sonera Class 2 Root CA",
-	"3A:44:73:5A:E5:81:90:1F:24:86:61:46:1E:3B:9C:C4:5F:F5:3A:1B":
-		"Secure Global CA",
-	"3B:C0:38:0B:33:C3:F6:A6:0C:86:15:22:93:D9:DF:F5:4B:81:C0:04":
-		"Trustis FPS Root CA",
-	"3B:C4:9F:48:F8:F3:73:A0:9C:1E:BD:F8:5B:B1:C3:65:C7:D8:11:B3":
-		"SecureSign RootCA11",
-	"43:13:BB:96:F1:D5:86:9B:C1:4E:6A:92:F6:CF:F6:34:69:87:82:37":
-		"TeliaSonera Root CA v1",
-	"47:BE:AB:C9:22:EA:E8:0E:78:78:34:62:A7:9F:45:C2:54:FD:E6:8B":
-		"Go Daddy Root Certificate Authority - G2",
-	"48:12:BD:92:3C:A8:C4:39:06:E7:30:6D:27:96:E6:A4:CF:22:2E:7D":
-		"QuoVadis Root CA 3 G3",
-	"49:0A:75:74:DE:87:0A:47:FE:58:EE:F6:C7:6B:EB:C6:0B:12:40:99":
-		"Buypass Class 2 Root CA",
-	"4A:BD:EE:EC:95:0D:35:9C:89:AE:C7:52:A1:2C:5B:29:F6:D6:AA:0C":
-		"Global Chambersign Root - 2008",
-	"4C:DD:51:A3:D1:F5:20:32:14:B0:C6:C5:32:23:03:91:C7:46:42:6D":
-		"SSL.com EV Root Certification Authority ECC",
-	"4E:B6:D5:78:49:9B:1C:CF:5F:58:1E:AD:56:BE:3D:9B:67:44:A5:E5":
-		"VeriSign Class 3 Public Primary Certification Authority - G5",
-	"4F:65:8E:1F:E9:06:D8:28:02:E9:54:47:41:C9:54:25:5D:69:CC:1A":
-		"Certplus Root CA G2",
-	"50:30:06:09:1D:97:D4:F5:AE:39:F7:CB:E7:92:7D:7D:65:2D:34:31":
-		"Entrust.net Premium 2048 Secure Server CA",
-	"51:C6:E7:08:49:06:6E:F3:92:D4:5C:A0:0D:6D:A3:62:8F:C3:52:39":
-		"E-Tugra Certification Authority",
-	"55:A6:72:3E:CB:F2:EC:CD:C3:23:74:70:19:9D:2A:BE:11:E3:81:D1":
-		"T-TeleSec GlobalRoot Class 3",
-	"58:D1:DF:95:95:67:6B:63:C0:F0:5B:1C:17:4D:8B:84:0B:C8:78:BD":
-		"TrustCor ECA-1",
-	"58:E8:AB:B0:36:15:33:FB:80:F7:9B:1B:6D:29:D3:FF:8D:5F:00:F0":
-		"D-TRUST Root Class 3 CA 2 2009",
-	"59:0D:2D:7D:88:4F:40:2E:61:7E:A5:62:32:17:65:CF:17:D8:94:E9":
-		"T-TeleSec GlobalRoot Class 2",
-	"59:22:A1:E1:5A:EA:16:35:21:F8:98:39:6A:46:46:B0:44:1B:0F:A9":
-		"OISTE WISeKey Global Root GA CA",
-	"59:AF:82:79:91:86:C7:B4:75:07:CB:CF:03:57:46:EB:04:DD:B7:16":
-		"Staat der Nederlanden Root CA - G2",
-	"5A:8C:EF:45:D7:A6:98:59:76:7A:8C:8B:44:96:B5:78:CF:47:4B:1A":
-		"Amazon Root CA 2",
-	"5F:3B:8C:F2:F8:10:B3:7D:78:B4:CE:EC:19:19:C3:73:34:B9:C7:74":
-		"Security Communication RootCA2",
-	"5F:43:E5:B1:BF:F8:78:8C:AC:1C:C7:CA:4A:9A:C6:22:2B:CC:34:C6":
-		"Cybertrust Global Root",
-	"5F:B7:EE:06:33:E2:59:DB:AD:0C:4C:9A:E6:D3:8F:1A:61:C7:DC:25":
-		"DigiCert High Assurance EV Root CA",
-	"66:31:BF:9E:F7:4F:9E:B6:C9:D5:A6:0C:BA:6A:BE:D1:F7:BD:EF:7B":
-		"COMODO Certification Authority",
-	"67:65:0D:F1:7E:8E:7E:5B:82:40:A4:F4:56:4B:CF:E2:3D:69:C6:F0":
-		"ePKI Root Certification Authority",
-	"69:69:56:2E:40:80:F4:24:A1:E7:19:9F:14:BA:F3:EE:58:AB:6A:BB":
-		"GlobalSign ECC Root CA - R4",
-	"6E:26:64:F3:56:BF:34:55:BF:D1:93:3F:7C:01:DE:D8:13:DA:8A:A6":
-		"OpenTrust Root CA G3",
-	"70:17:9B:86:8C:00:A4:FA:60:91:52:22:3F:9F:3E:32:BD:E0:05:62":
-		"Visa eCommerce Root",
-	"74:20:74:41:72:9C:DD:92:EC:79:31:D8:23:10:8D:C2:81:92:E2:BB":
-		"Certplus Class 2 Primary CA",
-	"74:3A:F0:52:9B:D0:32:A0:F4:4A:83:CD:D4:BA:A9:7B:7C:2E:C4:9A":
-		"SSL.com EV Root Certification Authority RSA R2",
-	"74:F8:A3:C3:EF:E7:B3:90:06:4B:83:90:3C:21:64:60:20:E5:DF:CE":
-		"Network Solutions Certificate Authority",
-	"75:E0:AB:B6:13:85:12:27:1C:04:F8:5F:DD:DE:38:E4:B7:24:2E:FE":
-		"GlobalSign Root CA - R2",
-	"76:E2:7E:C1:4F:DB:82:C1:C0:A6:75:B5:05:BE:3D:29:B4:ED:DB:BB":
-		"Staat der Nederlanden EV Root CA",
-	"78:6A:74:AC:76:AB:14:7F:9C:6A:30:50:BA:9E:A8:7E:FE:9A:CE:3C":
-		"Chambers of Commerce Root - 2008",
-	"79:5F:88:60:C5:AB:7C:3D:92:E6:CB:F4:8D:E1:45:CD:11:EF:60:0B":
-		"OpenTrust Root CA G2",
-	"79:91:E8:34:F7:E2:EE:DD:08:95:01:52:E9:55:2D:14:E9:58:D5:7E":
-		"OpenTrust Root CA G1",
-	"7E:04:DE:89:6A:3E:66:6D:00:E6:87:D3:3F:FA:D9:3B:E8:3D:34:9E":
-		"DigiCert Global Root G3",
-	"85:A4:08:C0:9C:19:3E:5D:51:58:7D:CD:D6:13:30:FD:8C:DE:37:BF":
-		"Deutsche Telekom Root CA 2",
-	"87:82:C6:C3:04:35:3B:CF:D2:96:92:D2:59:3E:7D:44:D9:34:FF:11":
-		"SecureTrust CA",
-	"89:DF:74:FE:5C:F4:0F:4A:80:F9:E3:37:7D:54:DA:91:E1:01:31:8E":
-		"Microsec e-Szigno Root CA 2009",
-	"8C:F4:27:FD:79:0C:3A:D1:66:06:8D:E8:1E:57:EF:BB:93:22:72:D4":
-		"Entrust Root Certification Authority - G2",
-	"8D:17:84:D5:37:F3:03:7D:EC:70:FE:57:8B:51:9A:99:E6:10:D7:B0":
-		"GeoTrust Primary Certification Authority - G2",
-	"8D:A7:F9:65:EC:5E:FC:37:91:0F:1C:6E:59:FD:C1:CC:6A:6E:DE:16":
-		"Amazon Root CA 1",
-	"91:C6:D6:EE:3E:8A:C8:63:84:E5:48:C2:99:29:5C:75:6C:81:7B:81":
-		"thawte Primary Root CA",
-	"92:5A:8F:8D:2C:6D:04:E0:66:5F:59:6A:FF:22:D8:63:E8:25:6F:3F":
-		"Starfield Services Root Certificate Authority - G2",
-	"93:05:7A:88:15:C6:4F:CE:88:2F:FA:91:16:52:28:78:BC:53:64:17":
-		"ACCVRAIZ1",
-	"96:C9:1B:0B:95:B4:10:98:42:FA:D0:D8:22:79:FE:60:FA:B9:16:83":
-		"D-TRUST Root Class 3 CA 2 EV 2009",
-	"9B:AA:E5:9F:56:EE:21:CB:43:5A:BE:25:93:DF:A7:F0:40:D1:1D:CB":
-		"SwissSign Silver CA - G2",
-	"9C:BB:48:53:F6:A4:F6:D3:52:A4:E8:32:52:55:60:13:F5:AD:AF:65":
-		"TWCA Global Root CA",
-	"9D:70:BB:01:A5:A4:A0:18:11:2E:F7:1C:01:B9:32:C5:34:E7:88:A8":
-		"Certinomis - Root CA",
-	"9F:74:4E:9F:2B:4D:BA:EC:0F:31:2C:50:B6:56:3B:8E:2D:93:C3:11":
-		"COMODO ECC Certification Authority",
-	"9F:F1:71:8D:92:D5:9A:F3:7D:74:97:B4:BC:6F:84:68:0B:BA:B6:66":
-		"Hellenic Academic and Research Institutions ECC RootCA 2015",
-	"A1:4B:48:D9:43:EE:0A:0E:40:90:4F:3C:E0:A4:C0:91:93:51:5D:3F":
-		"DigiCert Assured ID Root G2",
-	"A8:98:5D:3A:65:E5:E5:C4:B2:D7:D6:6D:40:C6:DD:2F:B1:9C:54:36":
-		"DigiCert Global Root CA",
-	"AA:DB:BC:22:23:8F:C4:01:A1:27:BB:38:DD:F4:1D:DB:08:9E:F0:12":
-		"thawte Primary Root CA - G2",
-	"AD:7E:1C:28:B0:64:EF:8F:60:03:40:20:14:C3:D0:E3:37:0E:B5:8A":
-		"Starfield Class 2 CA",
-	"AE:C5:FB:3F:C8:E1:BF:C4:E5:4F:03:07:5A:9A:E8:00:B7:F7:B6:FA":
-		"Autoridad de Certificacion Firmaprofesional CIF A62634068",
-	"AF:E5:D2:44:A8:D1:19:42:30:FF:47:9F:E2:F8:97:BB:CD:7A:8C:B4":
-		"COMODO RSA Certification Authority",
-	"B1:2E:13:63:45:86:A4:6F:1A:B2:60:68:37:58:2D:C4:AC:FD:94:97":
-		"Certigna",
-	"B1:BC:96:8B:D4:F4:9D:62:2A:A8:9A:81:F2:15:01:52:A4:1D:82:9C":
-		"GlobalSign Root CA",
-	"B3:1E:B1:B7:40:E3:6C:84:02:DA:DC:37:D4:4D:F5:D4:67:49:52:F9":
-		"Entrust Root Certification Authority",
-	"B5:1C:06:7C:EE:2B:0C:3D:F8:55:AB:2D:92:F4:FE:39:D4:E7:0F:0E":
-		"Starfield Root Certificate Authority - G2",
-	"B5:61:EB:EA:A4:DE:E4:25:4B:69:1A:98:A5:57:47:C2:34:C7:D9:71":
-		"CA Disig Root R2",
-	"B7:AB:33:08:D1:EA:44:77:BA:14:80:12:5A:6F:BD:A9:36:49:0C:BB":
-		"SSL.com Root Certification Authority RSA",
-	"B8:01:86:D1:EB:9C:86:A5:41:04:CF:30:54:F3:4C:52:B7:E5:58:C6":
-		"XRamp Global CA Root",
-	"B8:23:6B:00:2F:1D:16:86:53:01:55:6C:11:A4:37:CA:EB:FF:C3:BB":
-		"AffirmTrust Premium ECC",
-	"B8:BE:6D:CB:56:F1:55:B9:63:D4:12:CA:4E:06:34:C7:94:B2:1C:C0":
-		"TrustCor RootCert CA-2",
-	"BA:29:41:60:77:98:3F:F4:F3:EF:F2:31:05:3B:2E:EA:6D:4D:45:FD":
-		"IdenTrust Public Sector Root CA 1",
-	"C3:19:7C:39:24:E6:54:AF:1B:C4:AB:20:95:7A:E2:C3:0E:13:02:6A":
-		"SSL.com Root Certification Authority ECC",
-	"C9:A8:B9:E7:55:80:5E:58:E3:53:77:A7:25:EB:AF:C3:7B:27:CC:D7":
-		"EE Certification Centre Root CA",
-	"CA:3A:FB:CF:12:40:36:4B:44:B2:16:20:88:80:48:39:19:93:7C:F7":
-		"QuoVadis Root CA 2",
-	"CA:BD:2A:79:A1:07:6A:31:F2:1D:25:36:35:CB:03:9D:43:29:A5:E8":
-		"ISRG Root X1",
-	"CF:9E:87:6D:D3:EB:FC:42:26:97:A3:B5:A3:7A:A0:76:A9:06:23:48":
-		"TWCA Root Certification Authority",
-	"D1:CB:CA:5D:B2:D5:2A:7F:69:3B:67:4D:E5:F0:5A:1D:0C:95:7D:F0":
-		"USERTrust ECC Certification Authority",
-	"D1:EB:23:A4:6D:17:D6:8F:D9:25:64:C2:F1:F1:60:17:64:D8:E3:49":
-		"Comodo AAA Services root",
-	"D3:DD:48:3E:2B:BF:4C:05:E8:AF:10:F5:FA:76:26:CF:D3:DC:30:92":
-		"Certum Trusted Network CA 2",
-	"D4:DE:20:D0:5E:66:FC:53:FE:1A:50:88:2C:78:DB:28:52:CA:E4:74":
-		"Baltimore CyberTrust Root",
-	"D6:9B:56:11:48:F0:1C:77:C5:45:78:C1:09:26:DF:5B:85:69:76:AD":
-		"GlobalSign Root CA - R3",
-	"D6:DA:A8:20:8D:09:D2:15:4D:24:B5:2F:CB:34:6E:B2:58:B2:8A:58":
-		"Hongkong Post Root CA 1",
-	"D8:A6:33:2C:E0:03:6F:B1:85:F6:63:4F:7D:6A:06:65:26:32:28:27":
-		"AffirmTrust Premium",
-	"D8:C5:38:8A:B7:30:1B:1B:6E:D4:7A:E6:45:25:3A:6F:9F:1A:27:61":
-		"SwissSign Gold CA - G2",
-	"D8:EB:6B:41:51:92:59:E0:F3:E7:85:00:C0:3D:B6:88:97:C9:EE:FC":
-		"Staat der Nederlanden Root CA - G3",
-	"DA:C9:02:4F:54:D8:F6:DF:94:93:5F:B1:73:26:38:CA:6A:D7:7C:13":
-		"DST Root CA X3",
-	"DA:FA:F7:FA:66:84:EC:06:8F:14:50:BD:C7:C2:81:A5:BC:A9:64:57":
-		"Buypass Class 3 Root CA",
-	"DD:FB:16:CD:49:31:C9:73:A2:03:7D:3F:C8:3A:4D:7D:77:5D:05:E4":
-		"DigiCert Trusted Root G4",
-	"DE:28:F4:A4:FF:E5:B9:2F:A3:C5:03:D1:A3:49:A7:F9:96:2A:82:12":
-		"GeoTrust Global CA",
-	"DE:3F:40:BD:50:93:D3:9B:6C:60:F6:DA:BC:07:62:01:00:89:76:C9":
-		"QuoVadis Root CA",
-	"DF:3C:24:F9:BF:D6:66:76:1B:26:80:73:FE:06:D1:CC:8D:4F:82:A4":
-		"DigiCert Global Root G2",
-	"DF:71:7E:AA:4A:D9:4E:C9:55:84:99:60:2D:48:DE:5F:BC:F0:3A:25":
-		"IdenTrust Commercial Root CA 1",
-	"E2:52:FA:95:3F:ED:DB:24:60:BD:6E:28:F3:9C:CC:CF:5E:B3:3F:DE":
-		"SZAFIR ROOT CA2",
-	"E2:B8:29:4B:55:84:AB:6B:58:C2:90:46:6C:AC:3F:B8:39:8F:84:83":
-		"CFCA EV ROOT",
-	"E6:21:F3:35:43:79:05:9A:4B:68:30:9D:8A:2F:74:22:15:87:EC:79":
-		"GeoTrust Universal CA",
-	"EC:50:35:07:B2:15:C4:95:62:19:E2:A8:9A:5B:42:99:2C:4C:2C:20":
-		"AC RAIZ FNMT-RCM",
-	"F1:8B:53:8D:1B:E9:03:B6:A6:F0:56:43:5B:17:15:89:CA:F3:6B:F2":
-		"thawte Primary Root CA - G3",
-	"F3:73:B3:87:06:5A:28:84:8A:F2:F3:4A:CE:19:2B:DD:C7:8E:9C:AC":
-		"Actalis Authentication Root CA",
-	"F4:8B:11:BF:DE:AB:BE:94:54:20:71:E6:41:DE:6B:BE:88:2B:40:B9":
-		"Taiwan GRCA",
-	"F5:17:A2:4F:9A:48:C6:C9:F8:A2:00:26:9F:DC:0F:48:2C:AB:30:89":
-		"DigiCert Assured ID Root G3",
-	"F6:10:84:07:D6:F8:BB:67:98:0C:C2:E2:44:C2:EB:AE:1C:EF:63:BE":
-		"Amazon Root CA 4",
-	"F9:B5:B6:32:45:5F:9C:BE:EC:57:5F:80:DC:E9:6E:2C:C7:B2:78:B7":
-		"AffirmTrust Commercial",
-	"FA:B7:EE:36:97:26:62:FB:2D:B0:2A:F6:BF:03:FD:E8:7C:4B:2F:9B":
-		"certSIGN ROOT CA",
-	"FE:45:65:9B:79:03:5B:98:A1:61:B5:51:2E:AC:DA:58:09:48:22:4D":
-		"Hellenic Academic and Research Institutions RootCA 2011",
-	"FF:BD:CD:E7:82:C8:43:5E:3C:6F:26:86:5C:CA:A8:3A:45:5B:C3:0A":
-		"TrustCor RootCert CA-1"
-	},
-	"nss": {},
-	"openssl": {}
-}
\ No newline at end of file
diff --git a/client/site_tests/security_RootCA/control b/client/site_tests/security_RootCA/control
deleted file mode 100644
index f428faa..0000000
--- a/client/site_tests/security_RootCA/control
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-TIME='SHORT'
-AUTHOR = 'The Chromium OS Authors'
-DOC = """
-Ensures that the built-in Root CAs matches the whitelist for CrOS.
-"""
-NAME = 'security_RootCA'
-PURPOSE = 'Ensures that the built-in Root CAs matches the whitelist for CrOS.'
-CRITERIA = """
-Fail if the nss/openssl databases fail to match each other or the whitelist.
-"""
-ATTRIBUTES = "suite:bvt-inline, suite:smoke"
-TEST_CLASS = 'security'
-TEST_CATEGORY = 'Functional'
-TEST_TYPE = 'client'
-JOB_RETRIES = 2
-
-job.run_test('security_RootCA', opts=args)
-
diff --git a/client/site_tests/security_RootCA/format_baseline.py b/client/site_tests/security_RootCA/format_baseline.py
deleted file mode 100755
index eb39c96..0000000
--- a/client/site_tests/security_RootCA/format_baseline.py
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import json
-import re
-
-
-def dump_baseline(baseline):
-  """Pretty dumps baseline as a JSON string."""
-  dump = json.dumps(baseline, indent=4, ensure_ascii=False,
-                    sort_keys=True, separators=(',', ': ')).encode('utf-8')
-  # Replace leading spaces with tabs.
-  with_tabs = re.sub(r'\n +', lambda match: '\n\t', dump)
-  # Put values on a separate line.
-  with_newlines = re.sub(r': (?!{)', lambda match: ':\n\t\t', with_tabs)
-  return with_newlines
-
-
-if __name__ == '__main__':
-  with open('./baseline.json', 'r+') as fp:
-    baseline = json.load(fp)
-    formatted = dump_baseline(baseline)
-    fp.seek(0)
-    fp.write(formatted)
-    fp.truncate()
diff --git a/client/site_tests/security_RootCA/security_RootCA.py b/client/site_tests/security_RootCA/security_RootCA.py
deleted file mode 100644
index 4ae36aa..0000000
--- a/client/site_tests/security_RootCA/security_RootCA.py
+++ /dev/null
@@ -1,207 +0,0 @@
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import glob, json, logging, os, re, stat
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import pexpect
-
-
-DEFAULT_BASELINE = 'baseline.json'
-
-FINGERPRINT_RE = re.compile(r'Fingerprint \(SHA1\):\n\s+(\b[:\w]+)\b')
-NSS_ISSUER_RE = re.compile(r'Object Token:(.+?)\s+C,.?,.?')
-
-NSSCERTUTIL = '/usr/local/bin/certutil'
-NSSMODUTIL = '/usr/local/bin/modutil'
-OPENSSL = '/usr/bin/openssl'
-
-# This glob pattern is coupled to the snprintf() format in
-# get_cert_by_subject() in crypto/x509/by_dir.c in the OpenSSL
-# sources.  In theory the glob can catch files not created by that
-# snprintf(); such file names probably shouldn't be allowed to exist
-# anyway.
-OPENSSL_CERT_GLOB = '/etc/ssl/certs/' + '[0-9a-f]' * 8 + '.*'
-
-
-class security_RootCA(test.test):
-    """Verifies that the root CAs trusted by both NSS and OpenSSL
-       match the expected set."""
-    version = 1
-
-    def get_baseline_sets(self, baseline_file):
-        """Returns a dictionary of sets. The keys are the names of
-           the ssl components and the values are the sets of fingerprints
-           we expect to find in that component's Root CA list.
-
-           @param baseline_file: name of JSON file containing baseline.
-        """
-        baselines = {'nss': {}, 'openssl': {}}
-        baseline_file = open(os.path.join(self.bindir, baseline_file))
-        raw_baselines = json.load(baseline_file)
-        for i in ['nss', 'openssl']:
-            baselines[i].update(raw_baselines[i])
-            baselines[i].update(raw_baselines['both'])
-        return baselines
-
-    def get_nss_certs(self):
-        """
-        Returns the dict of certificate fingerprints observed in NSS,
-        or None if NSS is not available.
-        """
-        tmpdir = self.tmpdir
-
-        nss_shlib_glob = glob.glob('/usr/lib*/libnssckbi.so')
-        if len(nss_shlib_glob) == 0:
-            return None
-        elif len(nss_shlib_glob) > 1:
-            logging.warn("Found more than one copy of libnssckbi.so")
-
-        # Create new empty cert DB.
-        child = pexpect.spawn('"%s" -N -d %s' % (NSSCERTUTIL, tmpdir))
-        child.expect('Enter new password:')
-        child.sendline('foo')
-        child.expect('Re-enter password:')
-        child.sendline('foo')
-        child.close()
-
-        # Add the certs found in the compiled NSS shlib to a new module in DB.
-        cmd = ('"%s" -add testroots -libfile %s -dbdir %s' %
-               (NSSMODUTIL, nss_shlib_glob[0], tmpdir))
-        nssmodutil = pexpect.spawn(cmd)
-        nssmodutil.expect('\'q <enter>\' to abort, or <enter> to continue:')
-        nssmodutil.sendline('\n')
-        ret = utils.system_output(NSSMODUTIL + ' -list '
-                                  '-dbdir %s' % tmpdir)
-        self.assert_('2. testroots' in ret)
-
-        # Dump out the list of root certs.
-        all_certs = utils.system_output(NSSCERTUTIL +
-                                        ' -L -d %s -h all' % tmpdir,
-                                        retain_output=True)
-        certdict = {}  # A map of {SHA1_Fingerprint : CA_Nickname}.
-        cert_matches = NSS_ISSUER_RE.findall(all_certs)
-        logging.debug('NSS_ISSUER_RE.findall returned: %s', cert_matches)
-        for cert in cert_matches:
-            cert_dump = utils.system_output(NSSCERTUTIL +
-                                            ' -L -d %s -n '
-                                            '\"Builtin Object Token:%s\"' %
-                                            (tmpdir, cert), retain_output=True)
-            matches = FINGERPRINT_RE.findall(cert_dump)
-            for match in matches:
-                certdict[match] = cert
-        return certdict
-
-
-    def get_openssl_certs(self):
-        """Returns the dict of certificate fingerprints observed in OpenSSL."""
-        fingerprint_cmd = ' '.join([OPENSSL, 'x509', '-fingerprint',
-                                    '-issuer', '-noout',
-                                    '-in %s'])
-        certdict = {}  # A map of {SHA1_Fingerprint : CA_Nickname}.
-
-        for certfile in glob.glob(OPENSSL_CERT_GLOB):
-            f, i = utils.system_output(fingerprint_cmd % certfile,
-                                       retain_output=True).splitlines()
-            fingerprint = f.split('=')[1]
-            for field in i.split('/'):
-                items = field.split('=')
-                # Compensate for stupidly malformed issuer fields.
-                if len(items) > 1:
-                    if items[0] == 'CN':
-                        certdict[fingerprint] = items[1]
-                        break
-                    elif items[0] == 'O':
-                        certdict[fingerprint] = items[1]
-                        break
-                else:
-                    logging.warning('Malformed issuer string %s', i)
-            # Check that we found a name for this fingerprint.
-            if not fingerprint in certdict:
-                raise error.TestFail('Couldn\'t find issuer string for %s' %
-                                     fingerprint)
-        return certdict
-
-
-    def cert_perms_errors(self):
-        """Returns True if certificate files have bad permissions."""
-        # Acts as a regression check for crosbug.com/19848
-        has_errors = False
-        for certfile in glob.glob(OPENSSL_CERT_GLOB):
-            s = os.stat(certfile)
-            if s.st_uid != 0 or stat.S_IMODE(s.st_mode) != 0644:
-                logging.error("Bad permissions: %s",
-                              utils.system_output("ls -lH %s" % certfile))
-                has_errors = True
-
-        return has_errors
-
-
-    def run_once(self, opts=None):
-        """Test entry point.
-        
-            Accepts 2 optional args, e.g. test_that --args="relaxed
-            baseline=foo".  Parses the args array and invokes the main test
-            method.
-
-           @param opts: string containing command line arguments.
-        """
-        args = {'baseline': DEFAULT_BASELINE}
-        if opts:
-            args.update(dict([[k, v] for (k, e, v) in
-                              [x.partition('=') for x in opts]]))
-
-        self.verify_rootcas(baseline_file=args['baseline'],
-                            exact_match=('relaxed' not in args))
-
-
-    def verify_rootcas(self, baseline_file=DEFAULT_BASELINE, exact_match=True):
-        """Verify installed Root CA's all appear on a specified whitelist.
-           Covers both NSS and OpenSSL.
-
-           @param baseline_file: name of baseline file to use in verification.
-           @param exact_match: boolean indicating if expected-but-missing CAs
-                               should cause test failure. Defaults to True.
-        """
-        testfail = False
-
-        # Dump certificate info and run comparisons.
-        seen = {}
-        nss_store = self.get_nss_certs()
-        openssl_store = self.get_openssl_certs()
-        if nss_store is not None:
-            seen['nss'] = nss_store
-        if openssl_store is not None:
-            seen['openssl'] = openssl_store
-
-        # Merge all 4 dictionaries (seen-nss, seen-openssl, expected-nss,
-        # and expected-openssl) into 1 so we have 1 place to lookup
-        # fingerprint -> comment for logging purposes.
-        expected = self.get_baseline_sets(baseline_file)
-        cert_details = {}
-        for store in seen.keys():
-            for certdict in [expected, seen]:
-                cert_details.update(certdict[store])
-                certdict[store] = set(certdict[store])
-
-        for store in seen.keys():
-            missing = expected[store].difference(seen[store])
-            unexpected = seen[store].difference(expected[store])
-            if unexpected or (missing and exact_match):
-                testfail = True
-                logging.error('Results for %s', store)
-                logging.error('Unexpected')
-                for i in unexpected:
-                    logging.error('"%s": "%s"', i, cert_details[i])
-                if exact_match:
-                    logging.error('Missing')
-                    for i in missing:
-                        logging.error('"%s": "%s"', i, cert_details[i])
-
-        # cert_perms_errors() call first to avoid short-circuiting.
-        # Short circuiting could mask additional failures that would
-        # require a second build/test iteration to uncover.
-        if self.cert_perms_errors() or testfail:
-            raise error.TestFail('Unexpected Root CA findings')
diff --git a/client/site_tests/security_RootCA/update_baseline.py b/client/site_tests/security_RootCA/update_baseline.py
deleted file mode 100755
index 47d611a..0000000
--- a/client/site_tests/security_RootCA/update_baseline.py
+++ /dev/null
@@ -1,104 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""A script to update "baseline" with new openssl roots.
-
-Usage:
-  ./add_openssl_roots.py ./baseline.json /path/to/new/roots/
-
-This reads the NSS store from baseline, updates the openssl section with certs
-from /path/to/new/certs, and updates the diffs between NSS and openssl. It
-updates the baseline file in place.
-
-/path/to/new/roots can be the unpacked certificate directory from
-app-misc/ca-certificates, or
-chroot/build/${BOARD}/usr/share/ca-certificates/mozilla/ if you have emerged the
-upgraded package for ${BOARD}.
-"""
-
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-
-import argparse
-import glob
-import json
-import os
-import subprocess
-import sys
-
-from format_baseline import dump_baseline
-
-
-SSL_CMD = ['openssl', 'x509', '-fingerprint', '-noout', '-in']
-
-
-def get_nss_store(baseline_file):
-  """Parses baseline for the NSS store."""
-  with open(baseline_file) as f:
-    baseline = json.load(f)
-
-  nss_store = baseline['nss'].copy()
-  nss_store.update(baseline['both'])
-  return nss_store
-
-
-def get_openssl_store(certs_dir):
-  """Gets the new openssl store that should be updated to."""
-  openssl_store = {}
-  for cert in glob.glob(os.path.join(certs_dir, '*.crt')):
-    cn = os.path.basename(cert)  # certs are named after their common names
-    cn = cn.replace('_', ' ').replace('.crt', '')
-    fingerprint = subprocess.check_output(
-        SSL_CMD + [cert]).strip().partition('=')[2]
-    openssl_store[fingerprint.decode('utf-8')] = cn.decode('utf-8')
-  return openssl_store
-
-
-def store_diff(store_a, store_b):
-  """Returns certs in store_a but not store_b."""
-  fingerprints_a = set(store_a.keys())
-  fingerprints_b = set(store_b.keys())
-  a_min_b = fingerprints_a - fingerprints_b
-  return dict((fingerprint, store_a[fingerprint]) for fingerprint in a_min_b)
-
-
-def store_common(store_a, store_b):
-  """Returns certs in both stores."""
-  fingerprints_a = set(store_a.keys())
-  fingerprints_b = set(store_b.keys())
-  a_and_b = fingerprints_a & fingerprints_b
-  return dict((fingerprint, store_a[fingerprint]) for fingerprint in a_and_b)
-
-
-def parse_args(argv):
-  """Parses command line arguments."""
-  parser = argparse.ArgumentParser(description=__doc__)
-  parser.add_argument('baseline', default='./baseline.json',
-                      help='Path to baseline file')
-  parser.add_argument('rootsdir',
-                      help='Directory to the openssl certs to be installed')
-  opts = parser.parse_args(argv)
-  return opts
-
-
-def main(argv):
-  """The main function."""
-  opts = parse_args(argv)
-  nss_store = get_nss_store(opts.baseline)
-  openssl_store = get_openssl_store(opts.rootsdir)
-
-  new_baseline = {
-      u'both': store_common(openssl_store, nss_store),
-      u'nss': store_diff(nss_store, openssl_store),
-      u'openssl': store_diff(openssl_store, nss_store),
-  }
-  serialized = dump_baseline(new_baseline)
-  with open(opts.baseline, 'w') as f:
-    f.write(serialized)
-
-
-if __name__ == '__main__':
-  sys.exit(main(sys.argv[1:]))
diff --git a/client/site_tests/security_RunOci/control b/client/site_tests/security_RunOci/control
deleted file mode 100644
index 9382af0..0000000
--- a/client/site_tests/security_RunOci/control
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright (c) 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-TIME="SHORT"
-AUTHOR = "The Chromium OS Authors"
-DOC = """
-Tests features of the run_oci tool.
-"""
-NAME = "security_RunOci"
-PURPOSE = "Regression and integration tests of run_oci."
-CRITERIA = """
-Fail if the OCI configuration isn't configured correctly.
-"""
-ATTRIBUTES = "suite:bvt-cq"
-TEST_CLASS = "security"
-TEST_CATEGORY = "Functional"
-TEST_TYPE = "client"
-JOB_RETRIES = 2
-
-job.run_test("security_RunOci")
diff --git a/client/site_tests/security_RunOci/security_RunOci.py b/client/site_tests/security_RunOci/security_RunOci.py
deleted file mode 100644
index 04d9a35..0000000
--- a/client/site_tests/security_RunOci/security_RunOci.py
+++ /dev/null
@@ -1,204 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import glob
-import json
-import logging
-import os
-import pwd
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import autotemp, error
-
-CONFIG_JSON_TEMPLATE = '''
-{
-    "ociVersion": "1.0.0-rc1",
-    "platform": {
-        "os": "linux",
-        "arch": "all"
-    },
-    "process": {
-        "terminal": true,
-        "user": {
-            "uid": 0,
-            "gid": 0
-        },
-        "args": [],
-        "cwd": "/"
-    },
-    "root": {
-        "path": "rootfs",
-        "readonly": true
-    },
-    "hostname": "runc",
-    "mounts": [
-        {
-            "destination": "/",
-            "type": "bind",
-            "source": "/",
-            "options": [
-                "rbind",
-                "ro"
-            ]
-        },
-        {
-            "destination": "/proc",
-            "type": "proc",
-            "source": "proc",
-            "options": [
-                "nodev",
-                "noexec",
-                "nosuid"
-            ]
-        }
-    ],
-    "hooks": {},
-    "linux": {
-        "namespaces": [
-        {
-            "type": "cgroup"
-        },
-        {
-            "type": "pid"
-        },
-        {
-            "type": "network"
-        },
-        {
-            "type": "ipc"
-        },
-        {
-            "type": "user"
-        },
-        {
-            "type": "uts"
-        },
-        {
-            "type": "mount"
-        }
-        ],
-        "resources": {
-            "devices": [
-                {
-                    "allow": false,
-                    "access": "rwm"
-                },
-                {
-                    "allow": true,
-                    "type": "c",
-                    "major": 1,
-                    "minor": 5,
-                    "access": "r"
-                }
-            ]
-        },
-        "uidMappings": [
-        {
-            "hostID": 1000,
-            "containerID": 0,
-            "size": 1
-        }
-        ],
-        "gidMappings": [
-        {
-            "hostID": 1000,
-            "containerID": 0,
-            "size": 1
-        }
-        ]
-    }
-}
-'''
-
-
-class security_RunOci(test.test):
-    """Tests run_oci."""
-
-    version = 1
-
-    preserve_srcdir = True
-
-    def run_test_in_dir(self, test_config, oci_path):
-        """
-        Executes the test in the given directory that points to an OCI image.
-
-        @param test_config: The test's configuration in a dict.
-        @param oci_path: The path of the directory that contains config.json.
-        """
-        result = utils.run(
-            ['/usr/bin/run_oci'] + test_config['run_oci_args'] +
-            ['run', '-c', oci_path, 'test_container'] +
-            test_config.get('program_extra_argv', '').split(),
-            ignore_status=True, stderr_is_expected=True, verbose=True,
-            stdout_tee=utils.TEE_TO_LOGS, stderr_tee=utils.TEE_TO_LOGS)
-        expected = test_config['expected_result'].strip()
-        if result.stdout.strip() != expected:
-            logging.error('stdout mismatch %s != %s',
-                          result.stdout.strip(), expected)
-            return False
-        expected_err = test_config.get('expected_stderr', '').strip()
-        if result.stderr.strip() != expected_err:
-            logging.error('stderr mismatch %s != %s',
-                          result.stderr.strip(), expected_err)
-            return False
-        return True
-
-
-    def run_test(self, name, test_config):
-        """
-        Runs one test from the src directory.  Return 0 if the test passes,
-        return 1 on failure.
-
-        @param name: The name of the test.
-        @param test_config: The test's configuration in a dict.
-        """
-        chronos_uid = pwd.getpwnam('chronos').pw_uid
-        td = autotemp.tempdir()
-        os.chown(td.name, chronos_uid, chronos_uid)
-        with open(os.path.join(td.name, 'config.json'), 'w') as config_file:
-            config = json.loads(CONFIG_JSON_TEMPLATE)
-            config['process']['args'] = test_config['program_argv']
-            if 'overrides' in test_config:
-                for path, value in test_config['overrides'].iteritems():
-                    node = config
-                    path = path.split('.')
-                    for component in path[:-1]:
-                        if component not in node:
-                            node[component] = {}
-                        node = node[component]
-                    if (path[-1] in node and
-                            isinstance(node[path[-1]], list) and
-                            isinstance(value, list)):
-                        node[path[-1]].extend(value)
-                    else:
-                        node[path[-1]] = value
-            logging.debug('Running %s with config.json %s',
-                          name, json.dumps(config))
-            json.dump(config, config_file, indent=2)
-        rootfs_path = os.path.join(td.name, 'rootfs')
-        os.mkdir(rootfs_path)
-        os.chown(rootfs_path, chronos_uid, chronos_uid)
-        return self.run_test_in_dir(test_config, td.name)
-
-
-    def run_once(self):
-        """
-        Runs each of the tests specified in the source directory.
-        This test fails if any subtest fails. Sub tests exercise the run_oci
-        command and check that the correct namespace mappings and mounts are
-        made. If any subtest fails, this test will fail.
-        """
-        failed = []
-        ran = 0
-        for p in glob.glob('%s/test-*.json' % self.srcdir):
-            name = os.path.basename(p)
-            logging.info('Running: %s', name)
-            if not self.run_test(name, json.load(file(p))):
-                failed.append(name)
-            ran += 1
-        if ran == 0:
-            failed.append('No tests found to run from %s!' % (self.srcdir))
-        if failed:
-            logging.error('Failed: %s', failed)
-            raise error.TestFail('Failed: %s' % failed)
diff --git a/client/site_tests/security_RunOci/src/test-alt-syscall-settime.json b/client/site_tests/security_RunOci/src/test-alt-syscall-settime.json
deleted file mode 100644
index f8bb538..0000000
--- a/client/site_tests/security_RunOci/src/test-alt-syscall-settime.json
+++ /dev/null
@@ -1,9 +0,0 @@
-{
-    "run_oci_args": ["--cgroup_parent=chronos_containers"],
-    "program_argv": ["/bin/date", "-u", "--set", "010101"],
-    "expected_stderr": "date: cannot set date: Function not implemented",
-    "expected_result": "Mon Jan  1 00:00:00 UTC 2001",
-    "overrides": {
-        "linux.altSyscall": "third_party"
-    }
-}
diff --git a/client/site_tests/security_RunOci/src/test-bind-mount-trailing-slash.json b/client/site_tests/security_RunOci/src/test-bind-mount-trailing-slash.json
deleted file mode 100644
index e2cadcd..0000000
--- a/client/site_tests/security_RunOci/src/test-bind-mount-trailing-slash.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
-    "run_oci_args": ["--cgroup_parent=chronos_containers", "--bind_mount=/bin:/var/log/"],
-    "program_argv": ["/bin/ls", "/var/log/bash"],
-    "expected_result": "/var/log/bash"
-}
diff --git a/client/site_tests/security_RunOci/src/test-bind-mount.json b/client/site_tests/security_RunOci/src/test-bind-mount.json
deleted file mode 100644
index e5e2093..0000000
--- a/client/site_tests/security_RunOci/src/test-bind-mount.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
-    "run_oci_args": ["--cgroup_parent=chronos_containers", "--bind_mount=/bin:/var/log"],
-    "program_argv": ["/bin/ls", "/var/log/bash"],
-    "expected_result": "/var/log/bash"
-}
diff --git a/client/site_tests/security_RunOci/src/test-device-cgroup-allow.json b/client/site_tests/security_RunOci/src/test-device-cgroup-allow.json
deleted file mode 100644
index 4369481..0000000
--- a/client/site_tests/security_RunOci/src/test-device-cgroup-allow.json
+++ /dev/null
@@ -1,6 +0,0 @@
-{
-    "run_oci_args": ["--cgroup_parent=chronos_containers"],
-    "program_argv": ["/usr/bin/hexdump", "-n16", "/dev/zero"],
-    "expected_result": "0000000 0000 0000 0000 0000 0000 0000 0000 0000\n0000010",
-    "expected_stderr": ""
-}
diff --git a/client/site_tests/security_RunOci/src/test-device-cgroup-deny.json b/client/site_tests/security_RunOci/src/test-device-cgroup-deny.json
deleted file mode 100644
index 34aeac4..0000000
--- a/client/site_tests/security_RunOci/src/test-device-cgroup-deny.json
+++ /dev/null
@@ -1,6 +0,0 @@
-{
-    "run_oci_args": ["--cgroup_parent=chronos_containers"],
-    "program_argv": ["/usr/bin/hexdump", "-n1", "/dev/urandom"],
-    "expected_result": "",
-    "expected_stderr": "hexdump: /dev/urandom: Operation not permitted\nhexdump: all input file arguments failed"
-}
diff --git a/client/site_tests/security_RunOci/src/test-device.json b/client/site_tests/security_RunOci/src/test-device.json
deleted file mode 100644
index c52b371..0000000
--- a/client/site_tests/security_RunOci/src/test-device.json
+++ /dev/null
@@ -1,30 +0,0 @@
-{
-	"run_oci_args": ["--cgroup_parent=chronos_containers"],
-	"program_argv": ["/bin/ls", "/dev/null_test"],
-	"expected_result": "/dev/null_test",
-	"overrides": {
-		"mounts": [
-			{
-				"source": "tmpfs",
-				"destination": "/dev",
-				"type": "tmpfs",
-				"options": [
-					"noexec",
-					"nosuid"
-				],
-				"performInIntermediateNamespace": true
-			}
-		],
-		"linux.devices": [
-			{
-				"path" : "/dev/null_test",
-				"type" : "c",
-				"major" : 1,
-				"minor" : 3,
-				"fileMode" : 438,
-				"uid" : 0,
-				"gid" : 0
-			}
-		]
-	}
-}
diff --git a/client/site_tests/security_RunOci/src/test-gid.json b/client/site_tests/security_RunOci/src/test-gid.json
deleted file mode 100644
index 7f2ca85..0000000
--- a/client/site_tests/security_RunOci/src/test-gid.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
-    "run_oci_args": ["--cgroup_parent=chronos_containers"],
-    "program_argv": ["/usr/bin/id", "-g"],
-    "expected_result": "0"
-}
diff --git a/client/site_tests/security_RunOci/src/test-hooks-failure.json b/client/site_tests/security_RunOci/src/test-hooks-failure.json
deleted file mode 100644
index a435eed..0000000
--- a/client/site_tests/security_RunOci/src/test-hooks-failure.json
+++ /dev/null
@@ -1,15 +0,0 @@
-{
-	"run_oci_args": ["--cgroup_parent=chronos_containers"],
-	"program_argv": ["/bin/echo", "-n", "This should not run"],
-	"expected_result": "",
-	"overrides": {
-		"hooks": {
-			"prestart": [
-				{
-					"path": "/bin/false",
-					"args": ["false"]
-				}
-			]
-		}
-	}
-}
diff --git a/client/site_tests/security_RunOci/src/test-hooks.json b/client/site_tests/security_RunOci/src/test-hooks.json
deleted file mode 100644
index 4326845..0000000
--- a/client/site_tests/security_RunOci/src/test-hooks.json
+++ /dev/null
@@ -1,32 +0,0 @@
-{
-    "run_oci_args": ["--cgroup_parent=chronos_containers"],
-    "program_argv": ["/bin/echo", "-n", "3"],
-    "expected_result": "01234",
-    "overrides": {
-	    "hooks": {
-		    "prechroot": [
-			    {
-				    "path": "/bin/echo",
-				    "args": ["echo", "-n", "0"]
-			    }
-		    ],
-		    "prestart": [
-			    {
-				    "path": "/bin/echo",
-				    "args": ["echo", "-n", "1"]
-			    },
-			    {
-				    "path": "/bin/echo",
-				    "args": ["echo", "-n", "2"]
-			    }
-		    ],
-		    "poststop": [
-			    {
-				    "path": "/bin/echo",
-				    "args": ["echo", "-n", "4"]
-			    }
-		    ]
-	    }
-    }
-}
-
diff --git a/client/site_tests/security_RunOci/src/test-uid.json b/client/site_tests/security_RunOci/src/test-uid.json
deleted file mode 100644
index 0ff4174..0000000
--- a/client/site_tests/security_RunOci/src/test-uid.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
-    "run_oci_args": ["--cgroup_parent=chronos_containers"],
-    "program_argv": ["/usr/bin/id", "-u"],
-    "expected_result": "0"
-}
diff --git a/client/site_tests/security_RuntimeExecStack/control b/client/site_tests/security_RuntimeExecStack/control
deleted file mode 100644
index 3f9fc70..0000000
--- a/client/site_tests/security_RuntimeExecStack/control
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-TIME = "SHORT"
-AUTHOR = "The Chromium OS Authors"
-DOC = ("This test verifies that no running processes on the image have "
-    "an executable stack. This augments the platform_ToolchainOptions test "
-    "in that it will catch mprotect(..., PROT_EXEC, ...) calls too.")
-NAME = "security_RuntimeExecStack"
-PURPOSE = "To ensure running processes have non-executable stack"
-CRITERIA = ("Fail if a process has an executable stack")
-ATTRIBUTES = "suite:bvt-inline, suite:smoke"
-TEST_CLASS = "security"
-TEST_CATEGORY = "Functional"
-TEST_TYPE = "client"
-JOB_RETRIES = 2
-
-job.run_test('security_RuntimeExecStack')
diff --git a/client/site_tests/security_RuntimeExecStack/security_RuntimeExecStack.py b/client/site_tests/security_RuntimeExecStack/security_RuntimeExecStack.py
deleted file mode 100755
index c7b66e0..0000000
--- a/client/site_tests/security_RuntimeExecStack/security_RuntimeExecStack.py
+++ /dev/null
@@ -1,104 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error
-
-import logging
-import os
-import errno
-
-class security_RuntimeExecStack(test.test):
-    """Tests that processes have non-executable stacks
-
-    Examines the /proc/$pid/maps file of all running processes for the
-    stack segments' markings. If "x" is found, it fails.
-    """
-    version = 1
-
-    def check_no_exec_stack(self, maps):
-        """Reads process memory map and checks there are no executable stacks.
-
-        Args:
-            @param maps: opened /proc/<pid>/maps file
-
-        Returns:
-          A tuple containing the error code and a string (usually a single line)
-          with debug information. Error code could be:
-            0: ok: stack not executable (second element will be None)
-            1: error: stack is executable
-            2: error: stack is not writable
-            3: error: stack not found
-        """
-        contents = ''
-        stack_count = 0
-        for line in maps:
-            line = line.strip()
-            contents += line + '\n'
-
-            if '[stack' not in line:
-                continue
-            stack_count += 1
-
-            perms = line.split(' ', 2)[1]
-
-            # Stack segment is executable.
-            if 'x' in perms:
-                return 1, line
-
-            # Sanity check we have stack segment perms.
-            if not 'w' in perms:
-                return 2, line
-
-        if stack_count > 0:
-            # Stack segments are non-executable.
-            return 0, None
-        else:
-            # Should be impossible: no stack segment seen.
-            return 3, contents
-
-    def run_once(self):
-        failed = set([])
-
-        for pid in os.listdir('/proc'):
-            maps_path = '/proc/%s/maps' % (pid)
-            # Is this a pid directory?
-            if not os.path.exists(maps_path):
-                continue
-            # Is this a kernel thread?
-            try:
-                os.readlink('/proc/%s/exe' % (pid))
-            except OSError, e:
-                if e.errno == errno.ENOENT:
-                    continue
-            try:
-                maps = open(maps_path)
-                cmd = open('/proc/%s/cmdline' % (pid)).read()
-            except IOError:
-                # Allow the path to vanish out from under us. If
-                # we've failed for any other reason, raise the failure.
-                if os.path.exists(maps_path):
-                    raise
-                logging.debug('ignored: pid %s vanished', pid)
-                continue
-
-            # Clean up cmdline for reporting.
-            cmd = cmd.replace('\x00', ' ')
-            exe = cmd
-            if ' ' in exe:
-                exe = exe[:exe.index(' ')]
-
-            # Check the stack segment.
-            stack, report = self.check_no_exec_stack(maps)
-
-            # Report outcome.
-            if stack == 0:
-                logging.debug('ok: %s %s', pid, exe)
-            else:
-                logging.info('FAIL: %s %s %s', pid, cmd, report)
-                failed.add(exe)
-
-        if len(failed) != 0:
-            msg = 'Bad stacks segments: %s' % (', '.join(failed))
-            raise error.TestFail(msg)
diff --git a/client/site_tests/touch_ScrollDirection/touch_ScrollDirection.py b/client/site_tests/touch_ScrollDirection/touch_ScrollDirection.py
index d38b7d5..69708ed 100644
--- a/client/site_tests/touch_ScrollDirection/touch_ScrollDirection.py
+++ b/client/site_tests/touch_ScrollDirection/touch_ScrollDirection.py
@@ -90,6 +90,9 @@
             # Setup.
             self._set_autotest_ext(cr.autotest_ext)
             self._open_events_page(cr)
+            self._events.expand_page()
+            self._events.set_prevent_defaults(False)
+
             self._emulate_mouse()
             self._center_cursor()
 
diff --git a/client/site_tests/video_PlaybackPerf/video_PlaybackPerf.py b/client/site_tests/video_PlaybackPerf/video_PlaybackPerf.py
index f8dc302..b40781b 100644
--- a/client/site_tests/video_PlaybackPerf/video_PlaybackPerf.py
+++ b/client/site_tests/video_PlaybackPerf/video_PlaybackPerf.py
@@ -21,8 +21,8 @@
 from autotest_lib.client.cros.video import helper_logger
 
 
-DISABLE_ACCELERATED_VIDEO_DECODE_BROWSER_ARGS = [
-        '--disable-accelerated-video-decode']
+DISABLE_ACCELERATED_VIDEO_DECODE_BROWSER_ARGS = '--disable-accelerated-video-decode'
+ENABLE_AUTOPLAY = '--autoplay-policy=no-user-gesture-required'
 DOWNLOAD_BASE = 'http://commondatastorage.googleapis.com/chromiumos-test-assets-public/'
 
 PLAYBACK_WITH_HW_ACCELERATION = 'playback_with_hw_acceleration'
@@ -284,7 +284,8 @@
         keyvals = {}
 
         with chrome.Chrome(
-                extra_browser_args=helper_logger.chrome_vmodule_flag(),
+                extra_browser_args=[helper_logger.chrome_vmodule_flag(),
+                                    ENABLE_AUTOPLAY],
                 init_network_controller=True) as cr:
 
             # crbug/753292 - enforce the idle checks after login
@@ -332,8 +333,9 @@
                 return keyvals
 
         # Start chrome with disabled video hardware decode flag.
-        with chrome.Chrome(extra_browser_args=
+        with chrome.Chrome(extra_browser_args=[
                 DISABLE_ACCELERATED_VIDEO_DECODE_BROWSER_ARGS,
+                ENABLE_AUTOPLAY],
                 init_network_controller=True) as cr:
             hd = histogram_verifier.HistogramDiffer(
                     cr, constants.MEDIA_GVD_INIT_STATUS)
diff --git a/docs/wificell.md b/docs/wificell.md
index e96269f..312e1f6 100644
--- a/docs/wificell.md
+++ b/docs/wificell.md
@@ -35,8 +35,8 @@
 ## What suites should I run?
 
 There are a variety of WiFi-related suites, but developers are commonly
-interested in the functionality (`wifi_matfunc` or its servo-less variant,
-`wifi_matfunc_noservo`) and performance (`wifi_perf`) suites.
+interested in the functionality (`wifi_matfunc`) and performance (`wifi_perf`)
+suites.
 
 ## Configuring DNS entries for test APs
 
diff --git a/global_config.ini b/global_config.ini
index 197cd30..313a31e 100644
--- a/global_config.ini
+++ b/global_config.ini
@@ -105,7 +105,7 @@
 # Enable test result throttling.
 enable_result_throttling: False
 # Default maximum test result size in KB.
-default_max_result_size_KB: 20000
+default_max_result_size_KB: 40000
 
 [CLIENT]
 drop_caches: False
@@ -165,6 +165,7 @@
 # See https://code.google.com/p/chrome-os-partner/issues/detail?q=45875
 kilo_inodes_required_veyron_rialto: 55
 kilo_inodes_required_arkham: 50
+kilo_inodes_required_mistral: 50
 kilo_inodes_required_storm: 50
 kilo_inodes_required_whirlwind: 50
 
diff --git a/server/cros/chaos_ap_list.conf b/server/cros/chaos_ap_list.conf
index 9af4232..6b09d6b 100644
--- a/server/cros/chaos_ap_list.conf
+++ b/server/cros/chaos_ap_list.conf
@@ -146,18 +146,7 @@
 
 # Missing AP chromeos3-row2-rack1-host9
 
-[94:44:52:62:bd:81]
-brand = belkin
-wan_hostname = chromeos3-row2-rack1-host10
-ssid = belkin_f7d2301_n_ch6_wpa2
-frequency = 2437
-rpm_managed = True
-bss = 94:44:52:62:bd:81
-wan mac = 94:44:52:62:bd:82
-model = f7d1301_v1_2
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
+# Missing AP chromeos3-row2-rack1-host10
 
 [94:44:52:18:f0:a7]
 brand = belkin
diff --git a/server/cros/faft/config/DEFAULTS.py b/server/cros/faft/config/DEFAULTS.py
index b4fe479..04642bd 100644
--- a/server/cros/faft/config/DEFAULTS.py
+++ b/server/cros/faft/config/DEFAULTS.py
@@ -24,26 +24,8 @@
     has_keyboard = True
     has_powerbutton = True
     rec_button_dev_switch = False
-    long_rec_combo = False
-    use_u_boot = False
     ec_capability = list()
-    gbb_version = 1.1
-    wp_voltage = 'pp1800'
     spi_voltage = 'pp1800'
-    key_checker = [[0x29, 'press'],
-                   [0x32, 'press'],
-                   [0x32, 'release'],
-                   [0x29, 'release'],
-                   [0x28, 'press'],
-                   [0x28, 'release']]
-    key_checker_strict = [[0x29, 'press'],
-                          [0x29, 'release'],
-                          [0x32, 'press'],
-                          [0x32, 'release'],
-                          [0x28, 'press'],
-                          [0x28, 'release'],
-                          [0x61, 'press'],
-                          [0x61, 'release']]
 
     # Has eventlog support including proper timestamps. (Only for old boards!
     # Never disable this "temporarily, until we get around to implementing it"!)
@@ -60,15 +42,9 @@
     # Delay between keypresses in firmware screen
     confirm_screen = 3
 
-    # Delay between passing firmware screen and text mode warning screen
-    legacy_text_screen = 20
-
     # The developer screen timeouts fit our spec
     dev_screen_timeout = 30
 
-    # Delay for waiting beep done
-    beep = 1
-
     # Delay between power-on and plug USB
     usb_plug = 10
 
@@ -100,12 +76,6 @@
     # (also known as SHORT_DELAY in hdctools)
     hold_pwr_button_poweron = 0.2
 
-    # devserver startup time
-    devserver = 10
-
-    # Delay for user to power cycle the device
-    user_power_cycle = 20
-
     # Delay after /sbin/shutdown before pressing power button
     powerup_ready = 10
 
diff --git a/server/cros/faft/config/atlas.py b/server/cros/faft/config/atlas.py
index 850f829..a8aa856 100644
--- a/server/cros/faft/config/atlas.py
+++ b/server/cros/faft/config/atlas.py
@@ -10,7 +10,6 @@
     ec_capability = ['battery', 'charging',
                      'keyboard', 'lid', 'x86' ]
     firmware_screen = 15
-    wp_voltage = 'pp3300'
     spi_voltage = 'pp3300'
     servo_prog_state_delay = 10
     dark_resume_capable = True
diff --git a/server/cros/faft/config/auron.py b/server/cros/faft/config/auron.py
index 2ec8e8b..ac8c92c 100644
--- a/server/cros/faft/config/auron.py
+++ b/server/cros/faft/config/auron.py
@@ -12,6 +12,5 @@
     dark_resume_capable = True
     ec_capability = ['adc_ectemp', 'battery', 'charging',
                      'keyboard', 'lid', 'x86', 'usb', 'peci']
-    wp_voltage = 'pp3300'
     spi_voltage = 'pp3300'
     smm_store = False
diff --git a/server/cros/faft/config/cyan.py b/server/cros/faft/config/cyan.py
index d258830..e9326cb 100644
--- a/server/cros/faft/config/cyan.py
+++ b/server/cros/faft/config/cyan.py
@@ -13,7 +13,5 @@
                      'usb', 'smart_usb_charge']
     firmware_screen = 28
     usb_plug = 28
-    long_rec_combo = True
-    wp_voltage = 'pp1800'
     spi_voltage = 'pp1800'
     smm_store = False
diff --git a/server/cros/faft/config/dragonegg.py b/server/cros/faft/config/dragonegg.py
index 5a5e1bc..aabd0ce 100644
--- a/server/cros/faft/config/dragonegg.py
+++ b/server/cros/faft/config/dragonegg.py
@@ -10,7 +10,6 @@
     ec_capability = ['battery', 'charging',
                      'keyboard', 'lid', 'x86', 'usb', 'smart_usb_charge']
     firmware_screen = 15
-    wp_voltage = 'pp3300'
     spi_voltage = 'pp3300'
     servo_prog_state_delay = 10
     dark_resume_capable = True
diff --git a/server/cros/faft/config/eve.py b/server/cros/faft/config/eve.py
index a8456ae..c79ba1b 100644
--- a/server/cros/faft/config/eve.py
+++ b/server/cros/faft/config/eve.py
@@ -9,7 +9,6 @@
     chrome_ec = True
     ec_capability = ['battery', 'charging', 'doubleboot',
                      'keyboard', 'lid', 'x86' ]
-    wp_voltage = 'pp3300'
     spi_voltage = 'pp3300'
     servo_prog_state_delay = 10
     dark_resume_capable = True
diff --git a/server/cros/faft/config/fizz.py b/server/cros/faft/config/fizz.py
index 705a960..7e4e880 100644
--- a/server/cros/faft/config/fizz.py
+++ b/server/cros/faft/config/fizz.py
@@ -13,7 +13,6 @@
     has_keyboard = False
     rec_button_dev_switch = True
     spi_voltage = 'pp3300'
-    wp_voltage = 'pp3300'
     chrome_ec = True
     ec_capability = ['x86']
     delay_reboot_to_ping = 40
diff --git a/server/cros/faft/config/glados.py b/server/cros/faft/config/glados.py
index fbe7d07..0f7c714 100644
--- a/server/cros/faft/config/glados.py
+++ b/server/cros/faft/config/glados.py
@@ -11,7 +11,6 @@
     dark_resume_capable = True
     ec_capability = ['battery', 'charging', 'doubleboot',
                      'keyboard', 'lid', 'x86', 'usbpd_uart' ]
-    wp_voltage = 'pp3300'
     spi_voltage = 'pp3300'
     servo_prog_state_delay = 10
     smm_store = False
diff --git a/server/cros/faft/config/gnawty.py b/server/cros/faft/config/gnawty.py
index bec3fc0..d942b15 100644
--- a/server/cros/faft/config/gnawty.py
+++ b/server/cros/faft/config/gnawty.py
@@ -9,5 +9,4 @@
 
 class Values(rambi.Values):
     """Inherit overrides from rambi."""
-    wp_voltage = 'pp3300'
     spi_voltage = 'pp3300'
diff --git a/server/cros/faft/config/hatch.py b/server/cros/faft/config/hatch.py
index 60c50a3..aca2339 100644
--- a/server/cros/faft/config/hatch.py
+++ b/server/cros/faft/config/hatch.py
@@ -10,7 +10,6 @@
     ec_capability = ['battery', 'charging',
                      'keyboard', 'lid', 'x86', 'usb', 'smart_usb_charge']
     firmware_screen = 15
-    wp_voltage = 'pp3300'
     spi_voltage = 'pp3300'
     dark_resume_capable = True
     custom_usb_enable_names = ['EN_USB_A_5V']
diff --git a/server/cros/faft/config/jecht.py b/server/cros/faft/config/jecht.py
index 9b603e8..3beb0a1 100644
--- a/server/cros/faft/config/jecht.py
+++ b/server/cros/faft/config/jecht.py
@@ -11,5 +11,4 @@
     has_keyboard = False
     rec_button_dev_switch = True
     spi_voltage = 'pp3300'
-    wp_voltage = 'pp3300'
     smm_store = False
diff --git a/server/cros/faft/config/kitty.py b/server/cros/faft/config/kitty.py
index 6401ec6..27d21a3 100644
--- a/server/cros/faft/config/kitty.py
+++ b/server/cros/faft/config/kitty.py
@@ -7,6 +7,7 @@
 from autotest_lib.server.cros.faft.config import nyan
 
 class Values(nyan.Values):
+    """FAFT config values for Kitty."""
     ec_capability = ['arm']
     firmware_screen = 7
     has_lid = False
diff --git a/server/cros/faft/config/kunimitsu.py b/server/cros/faft/config/kunimitsu.py
index c1f79c8..9e20d2d 100644
--- a/server/cros/faft/config/kunimitsu.py
+++ b/server/cros/faft/config/kunimitsu.py
@@ -8,7 +8,6 @@
     """FAFT config values for Kunimitsu."""
     chrome_ec = True
     dark_resume_capable = True
-    wp_voltage = 'pp3300'
     spi_voltage = 'pp3300'
     ec_boot_to_console = 0.2
     ec_capability = ['battery', 'charging', 'doubleboot', 'keyboard',
diff --git a/server/cros/faft/config/link.py b/server/cros/faft/config/link.py
index b626ad2..736c1d1 100644
--- a/server/cros/faft/config/link.py
+++ b/server/cros/faft/config/link.py
@@ -9,12 +9,9 @@
     """FAFT config values for Link."""
     firmware_screen = 7
     chrome_ec = True
-    long_rec_combo = True
-    use_u_boot = True
     dark_resume_capable = True
     ec_capability = ['adc_ectemp', 'battery', 'charging',
                      'keyboard', 'lid', 'x86', 'thermal',
                      'usb', 'peci', 'kblight', 'smart_usb_charge']
-    wp_voltage = 'pp3300'
     spi_voltage = 'pp3300'
     smm_store = False
diff --git a/server/cros/faft/config/nami.py b/server/cros/faft/config/nami.py
index 3024534..b506737 100644
--- a/server/cros/faft/config/nami.py
+++ b/server/cros/faft/config/nami.py
@@ -10,7 +10,6 @@
     ec_capability = ['battery', 'charging',
                      'keyboard', 'lid', 'x86' ]
     firmware_screen = 15
-    wp_voltage = 'pp3300'
     spi_voltage = 'pp3300'
     servo_prog_state_delay = 10
     dark_resume_capable = True
diff --git a/server/cros/faft/config/nautilus.py b/server/cros/faft/config/nautilus.py
index 0fa5039..e7eeba3 100644
--- a/server/cros/faft/config/nautilus.py
+++ b/server/cros/faft/config/nautilus.py
@@ -10,7 +10,6 @@
     ec_capability = ['battery', 'charging',
                      'keyboard', 'lid', 'x86' ]
     firmware_screen = 15
-    wp_voltage = 'pp3300'
     spi_voltage = 'pp3300'
     servo_prog_state_delay = 10
     delay_reboot_to_ping = 40
diff --git a/server/cros/faft/config/ninja.py b/server/cros/faft/config/ninja.py
index 80cdab0..fc81f25 100644
--- a/server/cros/faft/config/ninja.py
+++ b/server/cros/faft/config/ninja.py
@@ -13,5 +13,4 @@
     has_lid = False
     has_keyboard = False
     rec_button_dev_switch = True
-    wp_voltage = 'pp3300'
     spi_voltage = 'pp3300'
diff --git a/server/cros/faft/config/nyan.py b/server/cros/faft/config/nyan.py
index a926cc1..36ad472 100644
--- a/server/cros/faft/config/nyan.py
+++ b/server/cros/faft/config/nyan.py
@@ -13,5 +13,4 @@
     ec_boot_to_console = 0.2
     ec_has_powerbtn_cmd = False
     firmware_screen = 4
-    wp_voltage = 'off'
     has_eventlog = False        # Shipped without RTC support in firmware
diff --git a/server/cros/faft/config/octopus.py b/server/cros/faft/config/octopus.py
index bbc3798..aac18aa 100644
--- a/server/cros/faft/config/octopus.py
+++ b/server/cros/faft/config/octopus.py
@@ -12,8 +12,6 @@
                      'usb', 'smart_usb_charge']
     firmware_screen = 25 # Time from deasserting cold_reset to firmware_screen being shown
     usb_plug = 45
-    long_rec_combo = True
-    wp_voltage = 'pp1800'
     spi_voltage = 'pp1800'
     custom_usb_enable_names = ['EN_USB_A0_5V', 'EN_USB_A1_5V']
     smm_store = False
diff --git a/server/cros/faft/config/panther.py b/server/cros/faft/config/panther.py
index 149b54f..7f3dd37 100644
--- a/server/cros/faft/config/panther.py
+++ b/server/cros/faft/config/panther.py
@@ -11,5 +11,4 @@
     has_keyboard = False
     rec_button_dev_switch = True
     spi_voltage = 'pp3300'
-    wp_voltage = 'pp3300'
     smm_store = False
diff --git a/server/cros/faft/config/parrot.py b/server/cros/faft/config/parrot.py
index b502054..98102a5 100644
--- a/server/cros/faft/config/parrot.py
+++ b/server/cros/faft/config/parrot.py
@@ -12,10 +12,4 @@
     ec_boot_to_console = 4
 
     dark_resume_capable = True
-    key_checker = [[0x29, 'press'],
-                   [0x32, 'press'],
-                   [0x32, 'release'],
-                   [0x29, 'release'],
-                   [0x47, 'press'],
-                   [0x47, 'release']]
     smm_store = False
diff --git a/server/cros/faft/config/quawks.py b/server/cros/faft/config/quawks.py
index 1b30718..b0fbc91 100644
--- a/server/cros/faft/config/quawks.py
+++ b/server/cros/faft/config/quawks.py
@@ -9,5 +9,3 @@
 
 class Values(rambi.Values):
     """Inherit overrides from rambi."""
-    """Configure servo-controlled WP pin as open drain"""
-    wp_voltage = 'off'
diff --git a/server/cros/faft/config/rambi.py b/server/cros/faft/config/rambi.py
index d0e138c..c3069e8 100644
--- a/server/cros/faft/config/rambi.py
+++ b/server/cros/faft/config/rambi.py
@@ -12,7 +12,5 @@
     ec_capability = ['battery', 'charging', 'keyboard', 'lid', 'x86',
                      'usb', 'smart_usb_charge']
     firmware_screen = 7
-    long_rec_combo = True
-    wp_voltage = 'pp1800'
     spi_voltage = 'pp1800'
     smm_store = False
diff --git a/server/cros/faft/config/rammus.py b/server/cros/faft/config/rammus.py
index 45ed650..dfbcb39 100644
--- a/server/cros/faft/config/rammus.py
+++ b/server/cros/faft/config/rammus.py
@@ -10,7 +10,6 @@
     ec_capability = ['battery', 'charging',
                      'keyboard', 'lid', 'x86' ]
     firmware_screen = 15
-    wp_voltage = 'pp3300'
     spi_voltage = 'pp3300'
     servo_prog_state_delay = 10
     dark_resume_capable = True
diff --git a/server/cros/faft/config/reef.py b/server/cros/faft/config/reef.py
index 959483a..b0fcf22 100644
--- a/server/cros/faft/config/reef.py
+++ b/server/cros/faft/config/reef.py
@@ -12,7 +12,5 @@
                      'usb', 'smart_usb_charge']
     firmware_screen = 25 # Time from deasserting cold_reset to firmware_screen being shown
     usb_plug = 45
-    long_rec_combo = True
-    wp_voltage = 'pp1800'
     spi_voltage = 'pp1800'
     smm_store = False
diff --git a/server/cros/faft/config/samus.py b/server/cros/faft/config/samus.py
index b0449d0..438279a 100644
--- a/server/cros/faft/config/samus.py
+++ b/server/cros/faft/config/samus.py
@@ -15,7 +15,6 @@
                      'lid', 'x86', 'usb', 'peci', 'smart_usb_charge',
                      'usbpd_uart' ]
     firmware_screen = 12
-    wp_voltage = 'pp3300'
     spi_voltage = 'pp3300'
     servo_prog_state_delay = 10
     smm_store = False
diff --git a/server/cros/faft/config/sarien.py b/server/cros/faft/config/sarien.py
index 78d1d60..cf9c8fe 100644
--- a/server/cros/faft/config/sarien.py
+++ b/server/cros/faft/config/sarien.py
@@ -12,7 +12,6 @@
     has_lid = True
     lid_wake_from_power_off = False
     spi_voltage = 'pp3300'
-    wp_voltage = 'pp3300'
     # Not a Chrome EC, do not expect keyboard via EC
     chrome_ec = False
     ec_capability = []
@@ -22,3 +21,6 @@
     smm_store = False
     # The EC image is stored in the AP SPI chip, so flashrom -p ec won't work.
     ap_access_ec_flash = False
+    # Depthcharge USB stack can drop keys that come in too fast and get stuck
+    # exiting developer mode if the delay for confirmation screen is too short.
+    confirm_screen = 11
diff --git a/server/cros/faft/config/skate.py b/server/cros/faft/config/skate.py
index d2dbdde..b8f1bc4 100644
--- a/server/cros/faft/config/skate.py
+++ b/server/cros/faft/config/skate.py
@@ -9,7 +9,6 @@
     """FAFT config values for Skate."""
     software_sync_update = 6
     chrome_ec = True
-    use_u_boot = True
     ec_capability = (['battery', 'keyboard', 'arm', 'lid'])
     ec_has_powerbtn_cmd = False
     has_eventlog = False        # No RTC support in firmware
diff --git a/server/cros/faft/config/slippy.py b/server/cros/faft/config/slippy.py
index 3f4682d..92a1bd1 100644
--- a/server/cros/faft/config/slippy.py
+++ b/server/cros/faft/config/slippy.py
@@ -14,6 +14,5 @@
     ec_capability = ['adc_ectemp', 'battery', 'charging',
                             'keyboard', 'lid', 'x86', 'thermal',
                             'usb', 'peci']
-    wp_voltage = 'pp3300'
     spi_voltage = 'pp3300'
     smm_store = False
diff --git a/server/cros/faft/config/snow.py b/server/cros/faft/config/snow.py
index 9751f33..9d987ba 100644
--- a/server/cros/faft/config/snow.py
+++ b/server/cros/faft/config/snow.py
@@ -11,5 +11,4 @@
     ec_capability = (['battery', 'keyboard', 'arm'])
     ec_boot_to_console = 0.4
     ec_has_powerbtn_cmd = False
-    use_u_boot = True
     has_eventlog = False
diff --git a/server/cros/faft/config/spring.py b/server/cros/faft/config/spring.py
index 279c07a..aca24c8 100644
--- a/server/cros/faft/config/spring.py
+++ b/server/cros/faft/config/spring.py
@@ -9,7 +9,6 @@
     """FAFT config values for Spring."""
     software_sync_update = 6
     chrome_ec = True
-    use_u_boot = True
     ec_capability = (['battery', 'keyboard', 'arm', 'lid'])
     ec_has_powerbtn_cmd = False
     has_eventlog = False        # No RTC support in firmware
diff --git a/server/cros/faft/config/stout.py b/server/cros/faft/config/stout.py
index ef1be89..6f87357 100644
--- a/server/cros/faft/config/stout.py
+++ b/server/cros/faft/config/stout.py
@@ -7,10 +7,4 @@
 
 class Values():
     """FAFT config values for Stout."""
-    key_checker = [[0x29, 'press'],
-                   [0x32, 'press'],
-                   [0x32, 'release'],
-                   [0x29, 'release'],
-                   [0x43, 'press'],
-                   [0x43, 'release']]
     smm_store = False
diff --git a/server/cros/faft/config/strago.py b/server/cros/faft/config/strago.py
index 2863246..55ba467 100644
--- a/server/cros/faft/config/strago.py
+++ b/server/cros/faft/config/strago.py
@@ -12,7 +12,5 @@
                      'usb', 'smart_usb_charge']
     firmware_screen = 25 # Time from deasserting cold_reset to firmware_screen being shown
     usb_plug = 45
-    long_rec_combo = True
-    wp_voltage = 'pp1800'
     spi_voltage = 'pp1800'
     smm_store = False
diff --git a/server/cros/faft/config/veyron.py b/server/cros/faft/config/veyron.py
index 26fb873..2bbe532 100644
--- a/server/cros/faft/config/veyron.py
+++ b/server/cros/faft/config/veyron.py
@@ -8,6 +8,5 @@
 class Values(object):
     """FAFT config values for Veyron."""
     spi_voltage = 'pp3300'
-    wp_voltage = 'pp3300'
     confirm_screen = 6
     has_eventlog = False        # for chrome-os-partner:61078
diff --git a/server/cros/faft/cr50_test.py b/server/cros/faft/cr50_test.py
index 4e66b18..5179e67 100644
--- a/server/cros/faft/cr50_test.py
+++ b/server/cros/faft/cr50_test.py
@@ -54,11 +54,9 @@
                                     'access to the Cr50 console')
 
         logging.info('Test Args: %r', full_args)
-        self.ccd_lockout = full_args.get('ccd_lockout', '').lower() == 'true'
-        logging.info('ccd is%s locked out', '' if self.ccd_lockout else ' not')
 
         self.can_set_ccd_level = (not self.cr50.using_ccd() or
-            self.cr50.testlab_is_on()) and not self.ccd_lockout
+            self.cr50.testlab_is_on())
         self.original_ccd_level = self.cr50.get_ccd_level()
         self.original_ccd_settings = self.cr50.get_cap_dict(
                 info=self.cr50.CAP_SETTING)
@@ -416,6 +414,13 @@
 
     def _restore_cr50_state(self):
         """Restore cr50 state, so the device can be used for further testing"""
+        state_mismatch = self._check_original_state()
+        if state_mismatch and not self._provision_update:
+            self._restore_original_state()
+            if self._raise_error_on_mismatch:
+                raise error.TestError('Unexpected state mismatch during '
+                                      'cleanup %s' % state_mismatch)
+
         # Try to open cr50 and enable testlab mode if it isn't enabled.
         try:
             self.fast_open(True)
@@ -443,13 +448,6 @@
         tpm_utils.ClearTPMOwnerRequest(self.host, wait_for_ready=True)
         self.clear_fwmp()
 
-        state_mismatch = self._check_original_state()
-        if state_mismatch and not self._provision_update:
-            self._restore_original_state()
-            if self._raise_error_on_mismatch:
-                raise error.TestError('Unexpected state mismatch during '
-                                      'cleanup %s' % state_mismatch)
-
         # Restore the ccd privilege level
         if hasattr(self, 'original_ccd_level'):
             self._reset_ccd_settings()
@@ -638,7 +636,7 @@
         # Running the update may cause cr50 to reboot. Wait for that before
         # sending more commands. The reboot should happen quickly. Wait a
         # maximum of 10 seconds.
-        self.cr50.wait_for_reboot(10)
+        self.cr50.wait_for_reboot(timeout=10)
 
         if erase_nvmem and rollback:
             self.cr50.erase_nvmem()
@@ -886,3 +884,9 @@
             self.host.run('cryptohome --action=tpm_wait_ownership')
         self.host.run('cryptohome '
                       '--action=remove_firmware_management_parameters')
+
+    def tpm_is_responsive(self):
+        """Check TPM responsiveness by running tpm_version."""
+        result = self.host.run('tpm_version', ignore_status=True)
+        logging.debug(result.stdout.strip())
+        return not result.exit_status
diff --git a/server/cros/faft/firmware_test.py b/server/cros/faft/firmware_test.py
index 16a9a64..15da555 100644
--- a/server/cros/faft/firmware_test.py
+++ b/server/cros/faft/firmware_test.py
@@ -226,7 +226,7 @@
         if hasattr(self, 'cr50'):
             system_info['cr50_version'] = self.servo.get('cr50_version')
 
-        logging.info('System info:\n' + pprint.pformat(system_info))
+        logging.info('System info:\n%s', pprint.pformat(system_info))
         self.write_attr_keyval(system_info)
 
     def invalidate_firmware_setup(self):
@@ -646,14 +646,7 @@
 
         @param enable: True if asserting write protect pin. Otherwise, False.
         """
-        try:
-            self.servo.set('fw_wp_state', 'force_on' if enable else 'force_off')
-        except:
-            # TODO(waihong): Remove this fallback when all servos have the
-            # above new fw_wp_state control.
-            self.servo.set('fw_wp_vref', self.faft_config.wp_voltage)
-            self.servo.set('fw_wp_en', 'on')
-            self.servo.set('fw_wp', 'on' if enable else 'off')
+        self.servo.set('fw_wp_state', 'force_on' if enable else 'force_off')
 
     def set_ec_write_protect_and_reboot(self, enable):
         """Set EC write protect status and reboot to take effect.
@@ -904,10 +897,6 @@
 
     def _setup_gbb_flags(self):
         """Setup the GBB flags for FAFT test."""
-        if self.faft_config.gbb_version < 1.1:
-            logging.info('Skip modifying GBB on versions older than 1.1.')
-            return
-
         if self.check_setup_done('gbb_flags'):
             return
 
diff --git a/server/cros/faft/utils/faft_checkers.py b/server/cros/faft/utils/faft_checkers.py
index 6db08fb..d6e89ea 100644
--- a/server/cros/faft/utils/faft_checkers.py
+++ b/server/cros/faft/utils/faft_checkers.py
@@ -59,34 +59,24 @@
             parsed_list[name] = value
         return parsed_list
 
-    def crossystem_checker(self, expected_dict, optional=None,
-                           suppress_logging=False):
+    def crossystem_checker(self, expected_dict, suppress_logging=False):
         """Check the crossystem values matched.
 
         Given an expect_dict which describes the expected crossystem values,
         this function check the current crossystem values are matched or not.
 
         @param expected_dict: A dict which contains the expected values.
-        @param optional: A list of expected_dict keys which are optional.  If
-                         crossystem does not report these keys (i.e. they don't
-                         exist on the system), they will not trigger a failure.
         @param suppress_logging: True to suppress any logging messages.
         @return: True if the crossystem value matched; otherwise, False.
         """
-        if optional == None:
-            optional = []
         succeed = True
         lines = self.faft_client.system.run_shell_command_get_output(
                 'crossystem')
         got_dict = self._parse_crossystem_output(lines)
         for key in expected_dict:
             if key not in got_dict:
-                if key in optional:
-                    logging.warn('Skipping optional key %r '
-                                 'not in crossystem result', key)
-                else:
-                    logging.warn('Expected key %r not in crossystem result', key)
-                    succeed = False
+                logging.warn('Expected key %r not in crossystem result', key)
+                succeed = False
                 continue
             if isinstance(expected_dict[key], str):
                 if got_dict[key] != expected_dict[key]:
@@ -121,28 +111,16 @@
         @param mode: A string of mode, one of 'normal', 'dev', or 'rec'.
         @return: True if the system in the given mode; otherwise, False.
         """
-        is_devsw = (self.faft_config.mode_switcher_type ==
-                    'physical_button_switcher')
         if mode == 'normal':
-            if is_devsw:
-                return self.crossystem_checker(
-                        {'devsw_cur': '0'},
-                        suppress_logging=True)
-            else:
-                return self.crossystem_checker(
-                        {'devsw_boot': '0',
-                         'mainfw_type': 'normal'},
-                        suppress_logging=True)
+            return self.crossystem_checker(
+                    {'devsw_boot': '0',
+                     'mainfw_type': 'normal'},
+                    suppress_logging=True)
         elif mode == 'dev':
-            if is_devsw:
-                return self.crossystem_checker(
-                        {'devsw_cur': '1'},
-                        suppress_logging=True)
-            else:
-                return self.crossystem_checker(
-                        {'devsw_boot': '1',
-                         'mainfw_type': 'developer'},
-                        suppress_logging=True)
+            return self.crossystem_checker(
+                    {'devsw_boot': '1',
+                     'mainfw_type': 'developer'},
+                    suppress_logging=True)
         elif mode == 'rec':
             return self.crossystem_checker(
                     {'mainfw_type': 'recovery'},
diff --git a/server/cros/faft/utils/mode_switcher.py b/server/cros/faft/utils/mode_switcher.py
index 214f6ac..6dc15d1 100644
--- a/server/cros/faft/utils/mode_switcher.py
+++ b/server/cros/faft/utils/mode_switcher.py
@@ -706,23 +706,6 @@
             raise ConnectionError('DUT is still up unexpectedly')
 
 
-class _PhysicalButtonSwitcher(_BaseModeSwitcher):
-    """Class that switches firmware mode via physical button."""
-
-    def _enable_dev_mode_and_reboot(self):
-        """Switch to developer mode and reboot."""
-        self.servo.enable_development_mode()
-        self.faft_client.system.run_shell_command(
-                'chromeos-firmwareupdate --mode todev && reboot')
-
-
-    def _enable_normal_mode_and_reboot(self):
-        """Switch to normal mode and reboot."""
-        self.servo.disable_development_mode()
-        self.faft_client.system.run_shell_command(
-                'chromeos-firmwareupdate --mode tonormal && reboot')
-
-
 class _KeyboardDevSwitcher(_BaseModeSwitcher):
     """Class that switches firmware mode via keyboard combo."""
 
@@ -943,10 +926,7 @@
     @param faft_framework: The main FAFT framework object.
     """
     switcher_type = faft_framework.faft_config.mode_switcher_type
-    if switcher_type == 'physical_button_switcher':
-        logging.info('Create a PhysicalButtonSwitcher')
-        return _PhysicalButtonSwitcher(faft_framework)
-    elif switcher_type == 'keyboard_dev_switcher':
+    if switcher_type == 'keyboard_dev_switcher':
         logging.info('Create a KeyboardDevSwitcher')
         return _KeyboardDevSwitcher(faft_framework)
     elif switcher_type == 'jetstream_switcher':
diff --git a/server/cros/network/netperf_runner.py b/server/cros/network/netperf_runner.py
index 6b1158e..a63af25 100644
--- a/server/cros/network/netperf_runner.py
+++ b/server/cros/network/netperf_runner.py
@@ -525,7 +525,7 @@
     def _restart_netserv(self):
         logging.info('Starting netserver...')
         self._kill_netserv()
-        self._server_host.run('%s -p %d >/dev/null 2>&1' %
+        self._server_host.run('%s -p %d' %
                               (self._command_netserv, self.NETPERF_PORT))
         startup_time = time.time()
         self._client_proxy.firewall_open('tcp', self._server_proxy.wifi_ip)
diff --git a/server/cros/servo/chrome_cr50.py b/server/cros/servo/chrome_cr50.py
index 746f8ec..f9b32cf 100644
--- a/server/cros/servo/chrome_cr50.py
+++ b/server/cros/servo/chrome_cr50.py
@@ -384,21 +384,18 @@
 
     def reboot(self):
         """Reboot Cr50 and wait for cr50 to reset"""
-        response = [] if self.using_ccd() else self.START_STR
-        self.send_command_get_output('reboot', response)
-
-        # ccd will stop working after the reboot. Wait until that happens and
-        # reenable it.
-        if self.using_ccd():
-            self.wait_for_reboot()
+        self.wait_for_reboot(cmd='reboot')
 
 
-    def _uart_wait_for_reboot(self, timeout=60):
-        """Wait for the cr50 to reboot and enable the console.
+    def _uart_wait_for_reboot(self, cmd='\n', timeout=60):
+        """Use uart to wait for cr50 to reboot.
 
-        This will wait up to timeout seconds for cr50 to print the start string.
+        If a command is given run it and wait for cr50 to reboot. Monitor
+        the cr50 uart to detect the reset. Wait up to timeout seconds
+        for the reset.
 
         Args:
+            cmd: the command to run to reset cr50.
             timeout: seconds to wait to detect the reboot.
         """
         original_timeout = float(self._servo.get('cr50_uart_timeout'))
@@ -406,7 +403,7 @@
         # for cr50 to print the start string.
         self._servo.set_nocheck('cr50_uart_timeout', timeout)
         try:
-            self.send_command_get_output('\n', self.START_STR)
+            self.send_command_get_output(cmd, self.START_STR)
             logging.debug('Detected cr50 reboot')
         except error.TestFail, e:
             logging.debug('Failed to detect cr50 reboot')
@@ -414,15 +411,24 @@
         self._servo.set_nocheck('cr50_uart_timeout', original_timeout)
 
 
-    def wait_for_reboot(self, timeout=60):
-        """Wait for cr50 to reboot"""
+    def wait_for_reboot(self, cmd='\n', timeout=60):
+        """Wait for cr50 to reboot
+
+        Run the cr50 reset command. Wait for cr50 to reset and reenable ccd if
+        necessary.
+
+        Args:
+            cmd: the command to run to reset cr50.
+            timeout: seconds to wait to detect the reboot.
+        """
         if self.using_ccd():
+            self.send_command(cmd)
             # Cr50 USB is reset when it reboots. Wait for the CCD connection to
             # go down to detect the reboot.
             self.wait_for_ccd_disable(timeout, raise_error=False)
             self.ccd_enable()
         else:
-            self._uart_wait_for_reboot(timeout)
+            self._uart_wait_for_reboot(cmd, timeout)
 
 
     def rollback(self, eraseflashinfo=True, chip_bid=None, chip_flags=None):
@@ -454,12 +460,7 @@
         if set_bid:
             self.send_command('bid 0x%x 0x%x' % (chip_bid, chip_flags))
 
-        if self.using_ccd():
-            self.send_command('rollback')
-            self.wait_for_reboot()
-        else:
-            logging.debug(self.send_command_get_output('rollback',
-                    ['.*Console is enabled'])[0])
+        self.wait_for_reboot(cmd='rollback')
 
         running_partition = self.get_active_version_info()[0]
         if inactive_partition != running_partition:
@@ -468,8 +469,8 @@
 
     def rolledback(self):
         """Returns true if cr50 just rolled back"""
-        return 'Rollback detected' in self.send_command_get_output('sysinfo',
-                ['sysinfo.*>'])[0]
+        return 'Rollback detected' in self.send_safe_command_get_output(
+                'sysinfo', ['sysinfo.*>'])[0]
 
 
     def get_version_info(self, regexp):
@@ -489,7 +490,7 @@
 
     def using_prod_rw_keys(self):
         """Returns True if the RW keyid is prod"""
-        rv = self.send_command_retry_get_output('sysinfo',
+        rv = self.send_safe_command_retry_get_output('sysinfo',
                 ['RW keyid:.*\(([a-z]+)\)'])
         logging.info(rv)
         return rv[0][1] == 'prod'
@@ -616,10 +617,10 @@
         """Reenable CCD and reset servo interfaces"""
         logging.info("reenable ccd")
         self._servo.set_nocheck('servo_v4_dts_mode', 'on')
-        # If the test is actually running with ccd, reset usb and wait for
-        # communication to come up.
+        # If the test is actually running with ccd, wait for USB communication
+        # to come up after reset.
         if self.using_ccd():
-            self._servo.set_nocheck('power_state', 'ccd_reset')
+            time.sleep(self._servo.USB_DETECTION_DELAY)
         self.wait_for_ccd_enable(raise_error=raise_error)
 
 
@@ -844,3 +845,27 @@
             logging.info('Cr50 has been up for %ds waiting %ds before update',
                          cr50_time, sleep_time)
             time.sleep(sleep_time)
+
+    def tpm_is_enabled(self):
+        """Query the current TPM mode.
+
+        Returns  True if TPM is enabled,
+                 False otherwise.
+        """
+        result = self.send_command_get_output('sysinfo',
+                ['(?i)TPM\s+MODE:\s+(enabled|disabled)'])[0][1]
+        logging.debug(result)
+
+        return result.lower() == 'enabled'
+
+    def keyladder_is_enabled(self):
+        """Get the status of H1 Key Ladder.
+
+        Returns True if H1 Key Ladder is enabled.
+                False otherwise.
+        """
+        result = self.send_command_get_output('sysinfo',
+                ['(?i)Key\s+Ladder:\s+(enabled|disabled)'])[0][1]
+        logging.debug(result)
+
+        return result.lower() == 'enabled'
diff --git a/server/cros/servo/servo.py b/server/cros/servo/servo.py
index da9f484..0ecccc5 100644
--- a/server/cros/servo/servo.py
+++ b/server/cros/servo/servo.py
@@ -137,6 +137,7 @@
     def __init__(self, servo):
         self._servo = servo
         self._streams = []
+        self._logs_dir = None
 
     def start_capture(self):
         """Start capturing Uart streams."""
@@ -151,13 +152,13 @@
                 logging.debug('The servod is too old that ec_uart_capture not '
                               'supported.')
 
-    def dump(self, output_dir):
-        """Dump UART streams to log files accordingly.
+    def dump(self):
+        """Dump UART streams to log files accordingly."""
+        if not self._logs_dir:
+            return
 
-        @param output_dir: A string of output directory name.
-        """
         for stream, logfile in self._streams:
-            logfile_fullname = os.path.join(output_dir, logfile)
+            logfile_fullname = os.path.join(self._logs_dir, logfile)
             try:
                 content = self._servo.get(stream)
             except Exception as err:
@@ -184,6 +185,18 @@
                 logging.warn('Failed to stop UART logging for %s: %s', uart,
                              err)
 
+    @property
+    def logs_dir(self):
+        """Return the directory to save UART logs."""
+        return self._logs_dir
+
+    @logs_dir.setter
+    def logs_dir(self, a_dir):
+        """Set directory to save UART logs.
+
+        @param a_dir  String of logs directory name."""
+        self._logs_dir = a_dir
+
 
 class Servo(object):
 
@@ -1001,17 +1014,24 @@
             logging.debug('Not a servo v4, unable to set role to %s.', role)
 
 
-    def dump_uart_streams(self, output_dir):
-        """Get buffered UART streams and append to log files.
+    @property
+    def uart_logs_dir(self):
+        """Return the directory to save UART logs."""
+        return self._uart.logs_dir if self._uart else ""
 
-        @param output_dir: A string of directory name to save log files.
-        """
+
+    @uart_logs_dir.setter
+    def uart_logs_dir(self, logs_dir):
+        """Set directory to save UART logs.
+
+        @param logs_dir  String of directory name."""
         if self._uart:
-            self._uart.dump(output_dir)
+            self._uart.logs_dir = logs_dir
 
 
     def close(self):
         """Close the servo object."""
         if self._uart:
             self._uart.stop_capture()
+            self._uart.dump()
             self._uart = None
diff --git a/server/cros/tradefed_chromelogin.py b/server/cros/tradefed_chromelogin.py
index de5b432..b799ac9 100644
--- a/server/cros/tradefed_chromelogin.py
+++ b/server/cros/tradefed_chromelogin.py
@@ -42,6 +42,10 @@
         board = kwargs.get('board')
         if board in constants.LOGIN_BOARD_TIMEOUT:
             self._timeout = constants.LOGIN_BOARD_TIMEOUT[board]
+        # DUT power off -> on cycle will still adhere DUT's reboot preference.
+        self._hard_reboot_on_failure = False
+        if kwargs.get('hard_reboot_on_failure') and self._need_reboot:
+            self._hard_reboot_on_failure = True
 
     def _cmd_builder(self, verbose=False):
         """Gets remote command to start browser with ARC enabled."""
@@ -180,8 +184,15 @@
         """
         logging.info('Rebooting...')
         try:
-            self._host.reboot()
-            self._need_reboot = False
+            if self._hard_reboot_on_failure and self._host.servo:
+                logging.info('Powering OFF the DUT: %s', self._host)
+                self._host.servo.get_power_state_controller().power_off()
+                logging.info('Powering ON the DUT: %s', self._host)
+                self._host.servo.get_power_state_controller().power_on()
+                self._hard_reboot_on_failure = False
+            else:
+                self._host.reboot()
+                self._need_reboot = False
         except Exception:
             if exc_type is None:
                 raise
diff --git a/server/cros/tradefed_test.py b/server/cros/tradefed_test.py
index 294a137..389a455 100644
--- a/server/cros/tradefed_test.py
+++ b/server/cros/tradefed_test.py
@@ -85,7 +85,8 @@
                    max_retry=None,
                    load_waivers=True,
                    retry_manual_tests=False,
-                   warn_on_test_retry=True):
+                   warn_on_test_retry=True,
+                   hard_reboot_on_failure=False):
         """Sets up the tools and binary bundles for the test."""
         self._install_paths = []
         # TODO(pwang): Remove host if we enable multiple hosts everywhere.
@@ -153,6 +154,7 @@
         # Load modules with no tests.
         self._notest_modules = self._get_expected_failures('notest_modules',
                 bundle)
+        self._hard_reboot_on_failure = hard_reboot_on_failure
 
     def cleanup(self):
         """Cleans up any dirtied state."""
@@ -1048,6 +1050,12 @@
             with self._login_chrome(
                     board=board,
                     reboot=self._should_reboot(steps),
+                    # TODO(rohitbm): Evaluate if power cycle really helps with
+                    # Bluetooth test failures, and then make the implementation
+                    # more strict by first running complete restart and reboot
+                    # retries and then perform power cycle.
+                    hard_reboot_on_failure=(self._hard_reboot_on_failure
+                                     and steps == self._max_retry),
                     dont_override_profile=keep_media) as current_logins:
                 self._ready_arc()
                 self._calculate_timeout_factor(bundle)
diff --git a/server/hosts/cros_label.py b/server/hosts/cros_label.py
index 5807e09..2b01b49 100644
--- a/server/hosts/cros_label.py
+++ b/server/hosts/cros_label.py
@@ -586,6 +586,23 @@
                         ignore_status=True).exit_status == 0
 
 
+class ReferenceDesignLabel(base_label.StringPrefixLabel):
+    """Determine the correct reference design label for the device. """
+
+    _NAME = 'reference_design'
+
+    def __init__(self):
+        self.response = None
+
+    def exists(self, host):
+        self.response = host.run('mosys platform family', ignore_status=True)
+        return self.response.exit_status == 0
+
+    def generate_labels(self, host):
+        if self.exists(host):
+            return [self.response.stdout.strip()]
+
+
 CROS_LABELS = [
     AccelsLabel(),
     ArcLabel(),
@@ -607,6 +624,7 @@
     LightSensorLabel(),
     LucidSleepLabel(),
     PowerSupplyLabel(),
+    ReferenceDesignLabel(),
     ServoLabel(),
     StorageLabel(),
     VideoGlitchLabel(),
diff --git a/server/hosts/servo_host.py b/server/hosts/servo_host.py
index 37b942e..7b1bb15 100644
--- a/server/hosts/servo_host.py
+++ b/server/hosts/servo_host.py
@@ -588,11 +588,11 @@
 
 
     def close(self):
-        """Stop UART logging and close the host object."""
+        """Close the associated servo and the host object."""
         if self._servo:
             # In some cases when we run as lab-tools, the job object is None.
-            if self.job:
-                self._servo.dump_uart_streams(self.job.resultdir)
+            if self.job and not self._servo.uart_logs_dir:
+                self._servo.uart_logs_dir = self.job.resultdir
             self._servo.close()
 
         super(ServoHost, self).close()
diff --git a/server/site_tests/audio_AudioARCPlayback/control.headphone b/server/site_tests/audio_AudioARCPlayback/control.headphone
index c428e71..1ac3548 100644
--- a/server/site_tests/audio_AudioARCPlayback/control.headphone
+++ b/server/site_tests/audio_AudioARCPlayback/control.headphone
@@ -14,7 +14,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "audio"
 TEST_TYPE = "server"
-ATTRIBUTES = "suite:chameleon_audio_unstable"
+ATTRIBUTES = "suite:chameleon_audio_perbuild"
 DEPENDENCIES = "chameleon, audio_board, arc, test_audiojack"
 JOB_RETRIES = 2
 
diff --git a/server/site_tests/audio_AudioARCRecord/control.ext_mic b/server/site_tests/audio_AudioARCRecord/control.ext_mic
index 997afa4..fc6abd0 100644
--- a/server/site_tests/audio_AudioARCRecord/control.ext_mic
+++ b/server/site_tests/audio_AudioARCRecord/control.ext_mic
@@ -14,7 +14,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "audio"
 TEST_TYPE = "server"
-ATTRIBUTES = "suite:chameleon_audio_unstable"
+ATTRIBUTES = "suite:chameleon_audio_perbuild"
 DEPENDENCIES = "chameleon, audio_board, arc, test_audiojack"
 JOB_RETRIES = 2
 
diff --git a/server/site_tests/audio_AudioAfterReboot/control.atrus_mic b/server/site_tests/audio_AudioAfterReboot/control.atrus_mic
deleted file mode 100644
index 88d5c55..0000000
--- a/server/site_tests/audio_AudioAfterReboot/control.atrus_mic
+++ /dev/null
@@ -1,38 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.cros.audio import audio_test_data
-from autotest_lib.client.cros.chameleon import chameleon_audio_ids
-from autotest_lib.server import utils
-
-AUTHOR = "chromeos-chameleon"
-NAME = "audio_AudioAfterReboot.atrus_mic"
-PURPOSE = "Remotely controlled atrus_mic AFTER REBOOT audio test."
-CRITERIA = "This test will fail if the captured audio does not match original file."
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "audio"
-TEST_TYPE = "server"
-ATTRIBUTES = "suite:hotrod"
-DEPENDENCIES = "atrus, audio_board, audio_box, chameleon, usb_speaker"
-JOB_RETRIES = 3
-
-DOC = """
-This test remotely tests atrus_mic audio function against DUT after reboot.
-"""
-
-args_dict = utils.args_to_dict(args)
-chameleon_args = hosts.CrosHost.get_chameleon_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, chameleon_args=chameleon_args)
-    job.run_test("audio_AudioAfterReboot", host=host,
-                 golden_data=(audio_test_data.SIMPLE_FREQUENCY_TEST_FILE, 1500),
-                 bind_from=chameleon_audio_ids.ChameleonIds.LINEOUT,
-                 bind_to=chameleon_audio_ids.PeripheralIds.SPEAKER,
-                 recorder=chameleon_audio_ids.CrosIds.INTERNAL_MIC,
-                 cfm_speaker=True, audio_nodes=(['USB'], ['USB']),
-                 tag = "atrus_mic")
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/audio_AudioAfterReboot/control.atrus_speaker b/server/site_tests/audio_AudioAfterReboot/control.atrus_speaker
deleted file mode 100644
index 6165e2b..0000000
--- a/server/site_tests/audio_AudioAfterReboot/control.atrus_speaker
+++ /dev/null
@@ -1,37 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.cros.audio import audio_test_data
-from autotest_lib.client.cros.chameleon import chameleon_audio_ids
-from autotest_lib.server import utils
-
-AUTHOR = "chromeos-chameleon"
-NAME = "audio_AudioAfterReboot.atrus_speaker"
-PURPOSE = "Remotely controlled internal_speaker AFTER REBOOT audio test."
-CRITERIA = "This test will fail if the captured audio does not match original file."
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "audio"
-TEST_TYPE = "server"
-ATTRIBUTES = "suite:hotrod"
-DEPENDENCIES = "atrus, audio_board, audio_box, chameleon, usb_speaker"
-JOB_RETRIES = 3
-
-DOC = """
-This test remotely tests atrus_speaker's audio function against DUT after reboot.
-"""
-
-args_dict = utils.args_to_dict(args)
-chameleon_args = hosts.CrosHost.get_chameleon_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, chameleon_args=chameleon_args)
-    job.run_test("audio_AudioAfterReboot", host=host,
-                 golden_data=(audio_test_data.SIMPLE_FREQUENCY_SPEAKER_TEST_FILE, 1000),
-                 recorder=chameleon_audio_ids.ChameleonIds.MIC,
-                 source=chameleon_audio_ids.CrosIds.SPEAKER,
-                 cfm_speaker=True, audio_nodes=(['USB'], ['USB']),
-                 tag = "atrus_speaker")
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/audio_AudioBasicInternalMicrophone/control.atrus_mic b/server/site_tests/audio_AudioBasicInternalMicrophone/control.atrus_mic
deleted file mode 100644
index 36e3c3b..0000000
--- a/server/site_tests/audio_AudioBasicInternalMicrophone/control.atrus_mic
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "chromeos-chameleon"
-NAME = "audio_AudioBasicInternalMicrophone.atrus_mic"
-PURPOSE = "Remotely controlled USB speaker audio test."
-CRITERIA = "Test will fail if the captured audio does not match original file."
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "audio"
-TEST_TYPE = "server"
-ATTRIBUTES = "suite:hotrod"
-DEPENDENCIES = "atrus, audio_board, audio_box, chameleon, usb_speaker"
-JOB_RETRIES = 3
-
-DOC = """
-This test remotely tests atrus speaker's mic audio function.
-"""
-
-args_dict = utils.args_to_dict(args)
-chameleon_args = hosts.CrosHost.get_chameleon_arguments(args_dict)
-
-
-def run_test(machine):
-    host = hosts.create_host(machine, chameleon_args=chameleon_args)
-    job.run_test("audio_AudioBasicInternalMicrophone", host=host,
-            cfm_speaker=True, tag="atrus_mic")
-
-parallel_simple(run_test, machines)
diff --git a/server/site_tests/audio_AudioBasicInternalSpeaker/control.atrus_speaker b/server/site_tests/audio_AudioBasicInternalSpeaker/control.atrus_speaker
deleted file mode 100644
index b82aa28..0000000
--- a/server/site_tests/audio_AudioBasicInternalSpeaker/control.atrus_speaker
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "chromeos-chameleon"
-NAME = "audio_AudioBasicInternalSpeaker.atrus_speaker"
-PURPOSE = "Remotely controlled USB speaker audio test."
-CRITERIA = "Test will fail if the captured audio does not match original file."
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "audio"
-TEST_TYPE = "server"
-ATTRIBUTES = "suite:bluestreak-pre-cq,suite:hotrod"
-DEPENDENCIES = "atrus, audio_board, audio_box, chameleon, usb_speaker"
-JOB_RETRIES = 3
-
-DOC = """
-This test remotely tests usb speaker audio function.
-"""
-
-args_dict = utils.args_to_dict(args)
-chameleon_args = hosts.CrosHost.get_chameleon_arguments(args_dict)
-
-
-def run_test(machine):
-    host = hosts.create_host(machine, chameleon_args=chameleon_args)
-    job.run_test("audio_AudioBasicInternalSpeaker", host=host,
-            cfm_speaker=True, tag="atrus_speaker")
-
-parallel_simple(run_test, machines)
diff --git a/server/site_tests/audio_AudioBasicUSBPlayback/control.suspend b/server/site_tests/audio_AudioBasicUSBPlayback/control.suspend
index d88371a..297e09f 100644
--- a/server/site_tests/audio_AudioBasicUSBPlayback/control.suspend
+++ b/server/site_tests/audio_AudioBasicUSBPlayback/control.suspend
@@ -8,7 +8,7 @@
 NAME = "audio_AudioBasicUSBPlayback.suspend"
 PURPOSE = "Remotely controlled USB audio test."
 CRITERIA = "This test will fail if the captured audio does not match original file"
-ATTRIBUTES = "suite:chameleon_audio_unstable"
+ATTRIBUTES = "suite:chameleon_audio_perbuild"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "audio"
diff --git a/server/site_tests/audio_AudioBasicUSBPlaybackRecord/control b/server/site_tests/audio_AudioBasicUSBPlaybackRecord/control
index 4d21517..20eee6f 100644
--- a/server/site_tests/audio_AudioBasicUSBPlaybackRecord/control
+++ b/server/site_tests/audio_AudioBasicUSBPlaybackRecord/control
@@ -8,7 +8,7 @@
 NAME = "audio_AudioBasicUSBPlaybackRecord"
 PURPOSE = "Remotely controlled USB audio test."
 CRITERIA = "This test will fail if the captured audio does not match original file."
-ATTRIBUTES = "suite:chameleon_audio_unstable"
+ATTRIBUTES = "suite:chameleon_audio_perbuild"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "audio"
diff --git a/server/site_tests/audio_AudioBasicUSBPlaybackRecord/control.suspend b/server/site_tests/audio_AudioBasicUSBPlaybackRecord/control.suspend
index e526c15..931346d 100644
--- a/server/site_tests/audio_AudioBasicUSBPlaybackRecord/control.suspend
+++ b/server/site_tests/audio_AudioBasicUSBPlaybackRecord/control.suspend
@@ -8,7 +8,7 @@
 NAME = "audio_AudioBasicUSBPlaybackRecord.suspend"
 PURPOSE = "Remotely controlled USB audio test."
 CRITERIA = "This test will fail if the captured audio does not match original file."
-ATTRIBUTES = "suite:chameleon_audio_unstable"
+ATTRIBUTES = "suite:chameleon_audio_perbuild"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "audio"
diff --git a/server/site_tests/audio_AudioBasicUSBRecord/control.suspend b/server/site_tests/audio_AudioBasicUSBRecord/control.suspend
index fad9286..3a59089 100644
--- a/server/site_tests/audio_AudioBasicUSBRecord/control.suspend
+++ b/server/site_tests/audio_AudioBasicUSBRecord/control.suspend
@@ -8,7 +8,7 @@
 NAME = "audio_AudioBasicUSBRecord.suspend"
 PURPOSE = "Remotely controlled USB audio test."
 CRITERIA = "This test will fail if the captured audio does not match original file."
-ATTRIBUTES = "suite:chameleon_audio_unstable"
+ATTRIBUTES = "suite:chameleon_audio_perbuild"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "audio"
diff --git a/server/site_tests/audio_AudioVolume/control.atrus_speaker b/server/site_tests/audio_AudioVolume/control.atrus_speaker
deleted file mode 100644
index 25799cf..0000000
--- a/server/site_tests/audio_AudioVolume/control.atrus_speaker
+++ /dev/null
@@ -1,38 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.cros.audio import audio_test_data
-from autotest_lib.client.cros.chameleon import chameleon_audio_ids
-from autotest_lib.server import utils
-
-AUTHOR = "chromeos-chameleon"
-NAME = "audio_AudioVolume.atrus_speaker"
-PURPOSE = "Remotely controlled speaker audio volume test."
-CRITERIA = "This test will fail if the recorded volume does not change"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "audio"
-TEST_TYPE = "server"
-ATTRIBUTES = "suite:hotrod"
-DEPENDENCIES = "atrus, audio_board, audio_box, chameleon, usb_speaker"
-JOB_RETRIES = 2
-
-DOC = """
-This test remotely tests atrus speaker audio volume function against DUT.
-"""
-
-args_dict = utils.args_to_dict(args)
-chameleon_args = hosts.CrosHost.get_chameleon_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, chameleon_args=chameleon_args)
-    job.run_test("audio_AudioVolume", host=host, tag="atrus_speaker",
-                 golden_file=audio_test_data.SIMPLE_FREQUENCY_SPEAKER_TEST_FILE,
-                 source_id=chameleon_audio_ids.CrosIds.SPEAKER,
-                 sink_id=None, cfm_speaker=True,
-                 recorder_id=chameleon_audio_ids.ChameleonIds.MIC,
-                 volume_spec=(70, 100, 0.75)
-                )
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/audio_AudioVolume/control.hdmi b/server/site_tests/audio_AudioVolume/control.hdmi
index a7b76e3..3494961 100644
--- a/server/site_tests/audio_AudioVolume/control.hdmi
+++ b/server/site_tests/audio_AudioVolume/control.hdmi
@@ -14,7 +14,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "audio"
 TEST_TYPE = "server"
-ATTRIBUTES = "suite:chameleon_audio_unstable"
+ATTRIBUTES = "suite:chameleon_audio_perbuild"
 DEPENDENCIES = "chameleon, audio_board, test_hdmiaudio"
 JOB_RETRIES = 2
 
diff --git a/server/site_tests/audio_AudioVolume/control.headphone b/server/site_tests/audio_AudioVolume/control.headphone
index 588b5e1..8746228 100644
--- a/server/site_tests/audio_AudioVolume/control.headphone
+++ b/server/site_tests/audio_AudioVolume/control.headphone
@@ -14,7 +14,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "audio"
 TEST_TYPE = "server"
-ATTRIBUTES = "suite:chameleon_audio_unstable"
+ATTRIBUTES = "suite:chameleon_audio_perbuild"
 DEPENDENCIES = "chameleon, audio_board, test_audiojack"
 JOB_RETRIES = 2
 
diff --git a/server/site_tests/audio_AudioVolume/control.speaker b/server/site_tests/audio_AudioVolume/control.speaker
index 69f4fca..83f3816 100644
--- a/server/site_tests/audio_AudioVolume/control.speaker
+++ b/server/site_tests/audio_AudioVolume/control.speaker
@@ -14,7 +14,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "audio"
 TEST_TYPE = "server"
-ATTRIBUTES = "suite:chameleon_audio_unstable"
+ATTRIBUTES = "suite:chameleon_audio_perbuild"
 DEPENDENCIES = "chameleon, audio_board, audio_box"
 JOB_RETRIES = 2
 
diff --git a/server/site_tests/audio_AudioVolume/control.usb b/server/site_tests/audio_AudioVolume/control.usb
index 19fd88d..db3c1ff 100644
--- a/server/site_tests/audio_AudioVolume/control.usb
+++ b/server/site_tests/audio_AudioVolume/control.usb
@@ -14,7 +14,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "audio"
 TEST_TYPE = "server"
-ATTRIBUTES = "suite:chameleon_audio_unstable"
+ATTRIBUTES = "suite:chameleon_audio_perbuild"
 DEPENDENCIES = "chameleon, audio_board, test_usbaudio"
 JOB_RETRIES = 2
 
diff --git a/server/site_tests/bluetooth_Sanity_DefaultState/bluetooth_Sanity_DefaultState.py b/server/site_tests/bluetooth_Sanity_DefaultState/bluetooth_Sanity_DefaultState.py
index e0e163c..cc85acf 100644
--- a/server/site_tests/bluetooth_Sanity_DefaultState/bluetooth_Sanity_DefaultState.py
+++ b/server/site_tests/bluetooth_Sanity_DefaultState/bluetooth_Sanity_DefaultState.py
@@ -8,6 +8,8 @@
 from autotest_lib.client.common_lib.cros.bluetooth import bluetooth_socket
 from autotest_lib.server.cros.bluetooth import bluetooth_test
 
+DEVICE_ADDRESS = '01:02:03:04:05:06'
+ADDRESS_TYPE = 0
 
 class bluetooth_Sanity_DefaultState(bluetooth_test.BluetoothTest):
     """
@@ -63,6 +65,14 @@
             strs.append("RAW")
         logging.debug(msg + ' [HCI]: %s', " ".join(strs))
 
+    def cleanup(self):
+        """ Test specific cleanup
+            Remove any devices added to whitelist
+        """
+        self.device.remove_device(DEVICE_ADDRESS, ADDRESS_TYPE)
+        super(bluetooth_Sanity_DefaultState, self).cleanup()
+
+
     def compare_property(self, bluez_property, mgmt_setting, current_settings):
         """ Compare bluez property value and Kernel property
 
@@ -197,10 +207,27 @@
         # setting should remain off, but we should be able to see the PSCAN
         # flag come and go.
         if supports_add_device:
+            # If PSCAN is currently on then device is CONNECTABLE
+            # or a previous add device which was not removed.
+            # Turn on and off DISCOVERABLE to turn off CONNECTABLE and
+            # PSCAN
+            if flags & bluetooth_socket.HCI_PSCAN:
+                if not (current_settings &
+                        bluetooth_socket.MGMT_SETTING_CONNECTABLE):
+                    raise error.TestFail('PSCAN on but device not CONNECTABLE')
+                logging.debug('Toggle Discoverable to turn off CONNECTABLE')
+                self.device.set_discoverable(True)
+                self.device.set_discoverable(False)
+                current_settings = self.device.read_info()[4]
+                flags = self.device.get_dev_info()[3]
+                self._log_flags('Discoverability Toggled', flags)
+                if flags & bluetooth_socket.HCI_PSCAN:
+                    raise error.TestFail('PSCAN on after toggling DISCOVERABLE')
+
             previous_settings = current_settings
             previous_flags = flags
 
-            self.device.add_device('01:02:03:04:05:06', 0, 1)
+            self.device.add_device(DEVICE_ADDRESS, ADDRESS_TYPE, 1)
 
             current_settings = self.device.read_info()[4]
             self._log_settings("After add device", current_settings)
@@ -209,13 +236,15 @@
             self._log_flags('After add device', flags)
 
             if current_settings != previous_settings:
+                self._log_settings("previous settings", previous_settings)
+                self._log_settings("current settings", current_settings)
                 raise error.TestFail(
                     'Bluetooth adapter settings changed after add device')
             if not flags & bluetooth_socket.HCI_PSCAN:
                 raise error.TestFail('HCI PSCAN flag not set after add device')
 
             # Remove the device again, and make sure the PSCAN flag goes away.
-            self.device.remove_device('01:02:03:04:05:06', 0)
+            self.device.remove_device(DEVICE_ADDRESS, ADDRESS_TYPE)
 
             current_settings = self.device.read_info()[4]
             self._log_settings("After remove device", current_settings)
diff --git a/server/site_tests/cheets_CTS_N/cheets_CTS_N.py b/server/site_tests/cheets_CTS_N/cheets_CTS_N.py
index 6d7c65a..1b96797 100644
--- a/server/site_tests/cheets_CTS_N/cheets_CTS_N.py
+++ b/server/site_tests/cheets_CTS_N/cheets_CTS_N.py
@@ -150,7 +150,8 @@
                    load_waivers=True,
                    retry_manual_tests=False,
                    warn_on_test_retry=True,
-                   cmdline_args=None):
+                   cmdline_args=None,
+                   hard_reboot_on_failure=False):
         super(cheets_CTS_N, self).initialize(
                 bundle=bundle,
                 uri=uri,
@@ -159,7 +160,8 @@
                 max_retry=max_retry,
                 load_waivers=load_waivers,
                 retry_manual_tests=retry_manual_tests,
-                warn_on_test_retry=warn_on_test_retry)
+                warn_on_test_retry=warn_on_test_retry,
+                hard_reboot_on_failure=hard_reboot_on_failure)
         if camera_facing:
             self.initialize_camerabox(camera_facing, cmdline_args)
 
diff --git a/server/site_tests/enterprise_LongevityTrackerServer/control.riseplayer b/server/site_tests/enterprise_LongevityTrackerServer/control.riseplayer
index b64ed40..dd94eea 100644
--- a/server/site_tests/enterprise_LongevityTrackerServer/control.riseplayer
+++ b/server/site_tests/enterprise_LongevityTrackerServer/control.riseplayer
@@ -9,7 +9,7 @@
 TEST_CATEGORY = "Performance"
 TEST_CLASS = "performance"
 TEST_TYPE = "server"
-ATTRIBUTES = "suite:longevity, suite:longevity_two"
+ATTRIBUTES = "suite:kiosk_longevity"
 DEPENDENCIES = "riseplayer"
 
 DOC = """
diff --git a/server/site_tests/enterprise_LongevityTrackerServer/enterprise_LongevityTrackerServer.py b/server/site_tests/enterprise_LongevityTrackerServer/enterprise_LongevityTrackerServer.py
index 29248fc..9e4cf06 100644
--- a/server/site_tests/enterprise_LongevityTrackerServer/enterprise_LongevityTrackerServer.py
+++ b/server/site_tests/enterprise_LongevityTrackerServer/enterprise_LongevityTrackerServer.py
@@ -216,8 +216,8 @@
                 kiosk_app_attributes=kiosk_app_attributes,
                 check_client_result=True)
 
-        if self.kiosk_app_name == 'riseplayer':
-            self.kiosk_facade.config_rise_player(ext_id, app_config_id)
+        #if self.kiosk_app_name == 'riseplayer':
+        #    self.kiosk_facade.config_rise_player(ext_id, app_config_id)
 
 
     def _run_perf_capture_cycle(self):
@@ -357,7 +357,6 @@
         time.sleep(STABILIZATION_DURATION)
 
         self._initialize_test_variables()
-
         for iteration in range(self.perf_params['perf_capture_iterations']):
             #TODO(krishnargv@): Add a method to verify that the Kiosk app is
             #                   active and is running on the DUT.
@@ -374,5 +373,4 @@
                         self.resultsdir, 'results-chart.json')
                 data_obj = self._format_data_for_upload(chart_data)
                 self._send_to_dashboard(data_obj)
-
         tpm_utils.ClearTPMOwnerRequest(self.client)
diff --git a/server/site_tests/firmware_ConsecutiveBoot/control.dev.100 b/server/site_tests/firmware_ConsecutiveBoot/control.dev.100
new file mode 100644
index 0000000..65f9df0
--- /dev/null
+++ b/server/site_tests/firmware_ConsecutiveBoot/control.dev.100
@@ -0,0 +1,37 @@
+# Copyright (c) 2019 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = "Chrome OS Team"
+NAME = "firmware_ConsecutiveBoot.dev.100"
+PURPOSE = "Servo based consecutive boot test (100 iterations)"
+CRITERIA = "This test will fail if DUT fails to boot from power-off"
+ATTRIBUTES = "suite:stress"
+TIME = "LONG"
+TEST_CATEGORY = "Stress"
+TEST_CLASS = "firmware"
+TEST_TYPE = "server"
+DEPENDENCIES = "servo"
+
+DOC = """
+This test is intended to be run with many iterations to ensure that the DUT
+does boot into Chrome OS and then does power off later in developer mode.
+
+Runs 100 boot iterations.
+"""
+
+args_dict = utils.args_to_dict(args)
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+args.append('faft_iterations=100')
+args.append('faft_waitup_time=60')
+args.append('faft_localrun=1')
+
+def run_consecutive_boot(machine):
+    host = hosts.create_host(machine, servo_args=servo_args)
+    job.run_test("firmware_ConsecutiveBoot", host=host, cmdline_args=args,
+                 disable_sysinfo=True, dev_mode=True, tag="normal")
+
+parallel_simple(run_consecutive_boot, machines)
diff --git a/server/site_tests/firmware_CorruptRecoveryCache/control b/server/site_tests/firmware_CorruptRecoveryCache/control
index 93015fc..2ef8d5b 100644
--- a/server/site_tests/firmware_CorruptRecoveryCache/control
+++ b/server/site_tests/firmware_CorruptRecoveryCache/control
@@ -8,7 +8,7 @@
 NAME = "firmware_CorruptRecoveryCache"
 PURPOSE = "Servo based RECOVERY_MRC_CACHE corruption test"
 CRITERIA = "This test will fail if the cache doesn't retrain and boot into recovery"
-ATTRIBUTES = "suite:faft_lv3"
+ATTRIBUTES = "suite:faft_bios, suite:faft_lv3"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
diff --git a/server/site_tests/firmware_CorruptRecoveryCache/control.dev b/server/site_tests/firmware_CorruptRecoveryCache/control.dev
index 8c54e47..f573a74 100644
--- a/server/site_tests/firmware_CorruptRecoveryCache/control.dev
+++ b/server/site_tests/firmware_CorruptRecoveryCache/control.dev
@@ -8,7 +8,7 @@
 NAME = "firmware_CorruptRecoveryCache"
 PURPOSE = "Servo based RECOVERY_MRC_CACHE corruption test"
 CRITERIA = "This test will fail if the cache doesn't retrain and boot into recovery"
-ATTRIBUTES = "suite:faft_lv3"
+ATTRIBUTES = "suite:faft_bios, suite:faft_lv3"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
diff --git a/server/site_tests/firmware_Cr50ConsoleCommands/firmware_Cr50ConsoleCommands.py b/server/site_tests/firmware_Cr50ConsoleCommands/firmware_Cr50ConsoleCommands.py
index 9758cd1..9f195bb 100644
--- a/server/site_tests/firmware_Cr50ConsoleCommands/firmware_Cr50ConsoleCommands.py
+++ b/server/site_tests/firmware_Cr50ConsoleCommands/firmware_Cr50ConsoleCommands.py
@@ -77,7 +77,8 @@
 
     def get_output(self, cmd, regexp, split_str, sort):
         """Return the cr50 console output"""
-        output = self.cr50.send_command_get_output(cmd, [regexp])[0][1].strip()
+        output = self.cr50.send_safe_command_get_output(cmd,
+                                                        [regexp])[0][1].strip()
         logging.debug('%s output:%s\n', cmd, output)
 
         # Record the original command output
diff --git a/server/site_tests/firmware_Cr50DeferredECReset/control b/server/site_tests/firmware_Cr50DeferredECReset/control
index 7d8dde8..adbc3d3 100644
--- a/server/site_tests/firmware_Cr50DeferredECReset/control
+++ b/server/site_tests/firmware_Cr50DeferredECReset/control
@@ -7,7 +7,7 @@
 AUTHOR = "Chrome OS Team"
 NAME = "firmware_Cr50DeferredECReset"
 PURPOSE = "Verify Deferred EC Reset."
-ATTRIBUTES = ""
+ATTRIBUTES = "suite:faft_cr50_prepvt, suite:faft_cr50_pvt"
 TIME = "SHORT"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo"
diff --git a/server/site_tests/firmware_Cr50RMAOpen/firmware_Cr50RMAOpen.py b/server/site_tests/firmware_Cr50RMAOpen/firmware_Cr50RMAOpen.py
index 8ed55d9..e22fa0e 100644
--- a/server/site_tests/firmware_Cr50RMAOpen/firmware_Cr50RMAOpen.py
+++ b/server/site_tests/firmware_Cr50RMAOpen/firmware_Cr50RMAOpen.py
@@ -86,11 +86,6 @@
         self.is_prod_mp = self.get_prod_mp_status()
 
 
-    def tpm_is_enabled(self):
-        """TPM is disabled if the tpm version cant be retrieved"""
-        return not self.host.run('tpm_version', ignore_status=True).exit_status
-
-
     def get_prod_mp_status(self):
         """Returns True if Cr50 is running a prod signed mp flagged image"""
         # Determine if the running image is using premp flags
@@ -265,7 +260,7 @@
 
         self.host.reboot()
 
-        if not self.tpm_is_enabled():
+        if not self.tpm_is_responsive():
             raise error.TestFail('TPM was not reenabled after reboot')
 
         # Run RMA disable to reset the capabilities.
@@ -279,7 +274,7 @@
         # The open process takes some time to complete. Wait for it.
         time.sleep(self.CHALLENGE_INTERVAL)
 
-        if self.tpm_is_enabled():
+        if self.tpm_is_responsive():
             raise error.TestFail('TPM was not disabled after RMA open')
 
         if self.cr50.get_wp_state() != self.WP_PERMANENTLY_DISABLED:
@@ -294,7 +289,7 @@
         # The open process takes some time to complete. Wait for it.
         time.sleep(self.CHALLENGE_INTERVAL)
 
-        if not self.tpm_is_enabled():
+        if not self.tpm_is_responsive():
             raise error.TestFail('TPM is disabled')
 
         # Confirm write protect has been reset to follow battery presence. The
diff --git a/server/site_tests/firmware_Cr50Testlab/firmware_Cr50Testlab.py b/server/site_tests/firmware_Cr50Testlab/firmware_Cr50Testlab.py
index e963fe8..6d03ca6 100644
--- a/server/site_tests/firmware_Cr50Testlab/firmware_Cr50Testlab.py
+++ b/server/site_tests/firmware_Cr50Testlab/firmware_Cr50Testlab.py
@@ -88,17 +88,6 @@
 
     def run_once(self):
         """Try to set testlab mode from different privilege levels."""
-        # ccd testlab can only be enabled after ccd is opened. This test wont
-        # do much if we can't open the device. firmware_Cr50Open should be
-        # enough to test ccd open capabilities. Do a basic test to make sure
-        # testlab mode can't be enabled while the device is locked, then raise
-        # test NA error.
-        if self.ccd_lockout:
-            self.cr50.set_ccd_level('lock')
-            self.try_testlab('on', err=self.ACCESS_DENIED)
-            raise error.TestNAError('Skipping firmware_Cr50Testlab when ccd is '
-                    'locked out.')
-
         # Dummy isn't a valid mode. Make sure it fails
         self.reset_ccd()
         self.try_testlab('dummy', err=self.INVALID_PARAM)
diff --git a/server/site_tests/firmware_Cr50TpmMode/firmware_Cr50TpmMode.py b/server/site_tests/firmware_Cr50TpmMode/firmware_Cr50TpmMode.py
index 2b8e6d7..73ce58f 100644
--- a/server/site_tests/firmware_Cr50TpmMode/firmware_Cr50TpmMode.py
+++ b/server/site_tests/firmware_Cr50TpmMode/firmware_Cr50TpmMode.py
@@ -37,10 +37,6 @@
         return cr50_utils.GSCTool(self.host,
                  ['-a', opt_text, mode_param]).stdout.strip()
 
-    def tpm_ping(self):
-        """Check TPM responsiveness by running tpm_version."""
-        return self.host.run('tpm_version').stdout.strip()
-
     def run_test_tpm_mode(self, disable_tpm, long_opt):
         """Run a test for the case of either disabling TPM or enabling.
 
@@ -51,19 +47,32 @@
         """
         # Reset the device.
         logging.info('Reset')
+
         self.servo.get_power_state_controller().reset()
         self.switcher.wait_for_client()
 
-        # Query TPM mode, which should be 'enabled (0)'.
+        self.fast_open(True)
+
+        # Check if TPM is enabled through console command.
         logging.info('Get TPM Mode')
+        if not self.cr50.tpm_is_enabled():
+            raise error.TestFail('TPM is not enabled after reset,')
+
+        # Check if Key Ladder is enabled.
+        if not self.cr50.keyladder_is_enabled():
+            raise error.TestFail('Failed to restore H1 Key Ladder')
+
+        # Check if TPM is enabled through gsctool.
         output_log = self.get_tpm_mode(long_opt)
         logging.info(output_log)
-        if output_log != 'TPM Mode: enabled (0)':
-            raise error.TestFail('Failure in reading TPM mode after reset')
+        if not 'enabled (0)' in output_log.lower():
+            raise error.TestFail('Failed to read TPM mode after reset')
 
-        # Check that TPM is enabled.
-        self.tpm_ping()
-        logging.info('Checked TPM is enabled')
+        # Check if CR50 responds to a TPM request.
+        if self.tpm_is_responsive():
+            logging.info('Checked TPM response')
+        else:
+            raise error.TestFail('Failed to check TPM response')
 
         # Change TPM Mode
         logging.info('Set TPM Mode')
@@ -72,30 +81,32 @@
 
         # Check the result of TPM Mode.
         if disable_tpm:
-            if output_log != 'TPM Mode: disabled (2)':
-                raise error.TestFail('Failure in disabling TPM: %s' %
-                        output_log)
+            if not 'disabled (2)' in output_log.lower():
+                raise error.TestFail('Failed to disable TPM: %s' % output_log)
 
-            # Check that TPM is disabled. The run should fail.
-            try:
-                result = self.tpm_ping()
-            except error.AutoservRunError:
-                logging.info('Checked TPM is disabled')
+            # Check if TPM is disabled. The run should fail.
+            if self.tpm_is_responsive():
+                raise error.TestFail('TPM responded')
             else:
-                raise error.TestFail('Unexpected TPM response: %s' % result)
-        else:
-            if output_log != 'TPM Mode: enabled (1)':
-                raise error.TestFail('Failure in enabling TPM: %s' % output_log)
+                logging.info('TPM did not respond')
 
-            # Check the TPM is enabled still.
-            self.tpm_ping()
-            logging.info('Checked TPM is enabled')
+            if self.cr50.keyladder_is_enabled():
+                raise error.TestFail('Failed to revoke H1 Key Ladder')
+        else:
+            if not 'enabled (1)' in output_log.lower():
+                raise error.TestFail('Failed to enable TPM: %s' % output_log)
+
+            # Check if TPM is enabled still.
+            if self.tpm_is_responsive():
+                logging.info('Checked TPM response')
+            else:
+                raise error.TestFail('Failed to check TPM response')
 
             # Subsequent set-TPM-mode vendor command should fail.
             try:
                 output_log = self.set_tpm_mode(not disable_tpm, long_opt)
             except error.AutoservRunError:
-                logging.info('Expected failure in disabling TPM mode');
+                logging.info('Expectedly failed to disable TPM mode');
             else:
                 raise error.TestFail('Unexpected result in disabling TPM mode:'
                         ' %s' % output_log)
diff --git a/server/site_tests/firmware_Cr50U2fCommands/control b/server/site_tests/firmware_Cr50U2fCommands/control
index 307ad34..1e3f12f 100644
--- a/server/site_tests/firmware_Cr50U2fCommands/control
+++ b/server/site_tests/firmware_Cr50U2fCommands/control
@@ -7,6 +7,7 @@
 AUTHOR = "louiscollard,apronin"
 NAME = "firmware_Cr50U2fCommands"
 PURPOSE = "Test U2F functionality in cr50"
+ATTRIBUTES = "suite:faft_cr50_pvt, suite:faft_cr50_prepvt"
 TIME = "SHORT"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo"
@@ -22,4 +23,4 @@
     host = hosts.create_host(machine, servo_args=servo_args)
     job.run_test('firmware_Cr50U2fCommands', host=host)
 
-parallel_simple(run_test, machines)
\ No newline at end of file
+parallel_simple(run_test, machines)
diff --git a/server/site_tests/firmware_DevTriggerRecovery/control b/server/site_tests/firmware_DevTriggerRecovery/control
deleted file mode 100644
index 5fb87ed..0000000
--- a/server/site_tests/firmware_DevTriggerRecovery/control
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "Chrome OS Team"
-NAME = "firmware_DevTriggerRecovery"
-PURPOSE = "Servo based recovery boot triggered by pressing a key at dev screen"
-CRITERIA = "This test will fail if firmware does not enter recovery mode"
-ATTRIBUTES = "suite:faft, suite:faft_lv2, suite:faft_normal"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "firmware"
-TEST_TYPE = "server"
-
-DOC = """
-This test requires a USB disk plugged-in, which contains a Chrome OS test
-image (built by "build_image --test"). On runtime, this test changes dev
-switch and reboot. It then presses the enter key at dev warning screen to
-trigger recovery boot and checks the success of it.
-"""
-
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run_devtriggerrecovery(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-    job.run_test("firmware_DevTriggerRecovery", host=host, cmdline_args=args,
-                 disable_sysinfo=True, dev_mode=False, tag="normal")
-
-parallel_simple(run_devtriggerrecovery, machines)
diff --git a/server/site_tests/firmware_DevTriggerRecovery/firmware_DevTriggerRecovery.py b/server/site_tests/firmware_DevTriggerRecovery/firmware_DevTriggerRecovery.py
deleted file mode 100644
index e35472f..0000000
--- a/server/site_tests/firmware_DevTriggerRecovery/firmware_DevTriggerRecovery.py
+++ /dev/null
@@ -1,87 +0,0 @@
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.server.cros import vboot_constants as vboot
-from autotest_lib.server.cros.faft.firmware_test import FirmwareTest
-
-
-class firmware_DevTriggerRecovery(FirmwareTest):
-    """
-    Servo based recovery boot test triggered by pressing enter at dev screen.
-
-    This test requires a USB disk plugged-in, which contains a Chrome OS test
-    image (built by "build_image --test"). On runtime, this test changes dev
-    switch and reboot. It then presses the enter key at dev warning screen to
-    trigger recovery boot and checks the success of it.
-    """
-    version = 1
-
-    def initialize(self, host, cmdline_args):
-        """Initialize the test"""
-        super(firmware_DevTriggerRecovery, self).initialize(host, cmdline_args)
-        self.switcher.setup_mode('normal')
-        self.setup_usbkey(usbkey=True, host=False)
-
-    def run_once(self):
-        """Main test logic"""
-        if self.faft_config.mode_switcher_type != 'physical_button_switcher':
-            raise error.TestNAError('This test is only valid in physical button'
-                                    'controlled dev mode firmware.')
-
-        logging.info("Enable dev mode.")
-        self.check_state((self.checkers.crossystem_checker, {
-                              'devsw_boot': '0',
-                              'mainfw_act': 'A',
-                              'mainfw_type': 'normal',
-                              }))
-        self.servo.enable_development_mode()
-        self.switcher.mode_aware_reboot(wait_for_dut_up=False)
-        self.switcher.bypass_dev_mode()
-        self.switcher.wait_for_client()
-
-        logging.info("Expected values based on platforms (see above), "
-                     "run 'chromeos-firmwareupdate --mode todev && reboot', "
-                     "and trigger recovery boot at dev screen. ")
-        self.check_state((self.checkers.crossystem_checker, {
-                    'devsw_boot': '1',
-                    'mainfw_act': 'A',
-                    'mainfw_type': 'developer',
-                    }))
-        self.faft_client.system.run_shell_command(
-                 'chromeos-firmwareupdate --mode todev && reboot')
-        # Ignore the default reboot_action here because the
-        # userspace_action (firmware updater) will reboot the system.
-        self.switcher.trigger_dev_to_rec()
-        self.switcher.wait_for_client()
-
-        logging.info("Expected recovery boot and disable dev switch.")
-        self.check_state((self.checkers.crossystem_checker, {
-                     'devsw_boot': '1',
-                     'mainfw_type': 'recovery',
-                     'recovery_reason' : vboot.RECOVERY_REASON['RW_DEV_SCREEN'],
-                     }))
-        self.servo.disable_development_mode()
-        self.switcher.mode_aware_reboot()
-
-        logging.info("Expected values based on platforms (see above), "
-                     "and run 'chromeos-firmwareupdate --mode tonormal && "
-                     "reboot'")
-        self.check_state((self.checkers.crossystem_checker, {
-                    'devsw_boot': '0',
-                    'mainfw_act': 'A',
-                    'mainfw_type': 'normal',
-                    }))
-        self.faft_client.system.run_shell_command(
-                            'chromeos-firmwareupdate --mode tonormal && reboot')
-        self.switcher.wait_for_client()
-
-        logging.info("Expected normal mode boot, done.")
-        self.check_state((self.checkers.crossystem_checker, {
-                              'devsw_boot': '0',
-                              'mainfw_act': 'A',
-                              'mainfw_type': 'normal',
-                              }))
diff --git a/server/site_tests/firmware_FWMPDisableCCD/firmware_FWMPDisableCCD.py b/server/site_tests/firmware_FWMPDisableCCD/firmware_FWMPDisableCCD.py
index bd84a5c..373a485 100644
--- a/server/site_tests/firmware_FWMPDisableCCD/firmware_FWMPDisableCCD.py
+++ b/server/site_tests/firmware_FWMPDisableCCD/firmware_FWMPDisableCCD.py
@@ -27,7 +27,7 @@
         # Test CCD if servo has access to Cr50, is running with CCD v1, and has
         # testlab mode enabled.
         self.test_ccd_unlock = (hasattr(self, 'cr50') and
-            self.cr50.has_command('ccdstate') and not self.ccd_lockout)
+            self.cr50.has_command('ccdstate'))
 
         logging.info('%sTesting CCD', '' if self.test_ccd_unlock else 'Not')
         if self.test_ccd_unlock:
diff --git a/server/site_tests/firmware_RecoveryCacheBootKeys/control b/server/site_tests/firmware_RecoveryCacheBootKeys/control
index 9febe60..7c64c5d 100644
--- a/server/site_tests/firmware_RecoveryCacheBootKeys/control
+++ b/server/site_tests/firmware_RecoveryCacheBootKeys/control
@@ -8,7 +8,7 @@
 NAME = "firmware_RecoveryCacheBootKeys"
 PURPOSE = "Servo based RECOVERY_MRC_CACHE boot tests"
 CRITERIA = "This test will fail if the DUT doesn't use the cache during recovery boot."
-ATTRIBUTES = "suite:faft_lv2"
+ATTRIBUTES = "suite:faft_bios, suite:faft_lv2"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
diff --git a/server/site_tests/firmware_RecoveryCacheBootKeys/control.dev b/server/site_tests/firmware_RecoveryCacheBootKeys/control.dev
index 8a45cbf..f953528 100644
--- a/server/site_tests/firmware_RecoveryCacheBootKeys/control.dev
+++ b/server/site_tests/firmware_RecoveryCacheBootKeys/control.dev
@@ -8,7 +8,7 @@
 NAME = "firmware_RecoveryCacheBootKeys"
 PURPOSE = "Servo based RECOVERY_MRC_CACHE boot tests"
 CRITERIA = "This test will fail if the DUT doesn't use the cache during recovery boot."
-ATTRIBUTES = "suite:faft_lv2"
+ATTRIBUTES = "suite:faft_bios, suite:faft_lv2"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
diff --git a/server/site_tests/network_WiFi_BSSTMReq/control b/server/site_tests/network_WiFi_BSSTMReq/control
index aea8e3b..1f7dcd0 100644
--- a/server/site_tests/network_WiFi_BSSTMReq/control
+++ b/server/site_tests/network_WiFi_BSSTMReq/control
@@ -7,7 +7,7 @@
 NAME = 'network_WiFi_BSSTMReq'
 TEST_TYPE = 'Server'
 #ATTRIBUTES = ('suite:wifi_endtoend, suite:wifi_release, '
-#              'suite:wifi_matfunc, suite:wifi_matfunc_noservo')
+#              'suite:wifi_matfunc')
 ATTRIBUTES = ('suite:wifi_flaky')
 DEPENDENCIES = 'wificell'
 
diff --git a/server/site_tests/network_WiFi_BeaconInterval/control.wifi_bintval b/server/site_tests/network_WiFi_BeaconInterval/control.wifi_bintval
index 807e692..ccbd2ff 100644
--- a/server/site_tests/network_WiFi_BeaconInterval/control.wifi_bintval
+++ b/server/site_tests/network_WiFi_BeaconInterval/control.wifi_bintval
@@ -6,7 +6,7 @@
 NAME = 'network_WiFi_BeaconInterval.wifi_bintval'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo')
+ATTRIBUTES = ('suite:wifi_matfunc')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_BgscanBackoff/control.5760noise_check b/server/site_tests/network_WiFi_BgscanBackoff/control.5760noise_check
index c3923ea..44aa639 100644
--- a/server/site_tests/network_WiFi_BgscanBackoff/control.5760noise_check
+++ b/server/site_tests/network_WiFi_BgscanBackoff/control.5760noise_check
@@ -6,7 +6,7 @@
 NAME = 'network_WiFi_BgscanBackoff.5760_noise_check'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo')
+ATTRIBUTES = ('suite:wifi_matfunc')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_BgscanBackoff/control.wifi_bgscan_backoff b/server/site_tests/network_WiFi_BgscanBackoff/control.wifi_bgscan_backoff
index bd1c296..37e3cad 100644
--- a/server/site_tests/network_WiFi_BgscanBackoff/control.wifi_bgscan_backoff
+++ b/server/site_tests/network_WiFi_BgscanBackoff/control.wifi_bgscan_backoff
@@ -6,7 +6,7 @@
 NAME = 'network_WiFi_BgscanBackoff.wifi_bgscan_backoff'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo')
+ATTRIBUTES = ('suite:wifi_matfunc')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_CSADisconnect/control b/server/site_tests/network_WiFi_CSADisconnect/control
index ee76eb4..e5e2e7e 100644
--- a/server/site_tests/network_WiFi_CSADisconnect/control
+++ b/server/site_tests/network_WiFi_CSADisconnect/control
@@ -6,7 +6,7 @@
 NAME = 'network_WiFi_CSADisconnect'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo')
+ATTRIBUTES = ('suite:wifi_matfunc')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_ChannelScanDwellTime/control b/server/site_tests/network_WiFi_ChannelScanDwellTime/control
index a66aa05..3c0e32f 100644
--- a/server/site_tests/network_WiFi_ChannelScanDwellTime/control
+++ b/server/site_tests/network_WiFi_ChannelScanDwellTime/control
@@ -6,7 +6,7 @@
 NAME = 'network_WiFi_ChannelScanDwellTime'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo')
+ATTRIBUTES = ('suite:wifi_matfunc')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_ConnectionIdentifier/control b/server/site_tests/network_WiFi_ConnectionIdentifier/control
index 1f166e6..eea5131 100644
--- a/server/site_tests/network_WiFi_ConnectionIdentifier/control
+++ b/server/site_tests/network_WiFi_ConnectionIdentifier/control
@@ -6,7 +6,7 @@
 NAME = 'network_WiFi_ConnectionIdentifier'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo')
+ATTRIBUTES = ('suite:wifi_matfunc')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_DTIMPeriod/control.wifi_DTIM_period b/server/site_tests/network_WiFi_DTIMPeriod/control.wifi_DTIM_period
index 4b6782b..3834c68 100644
--- a/server/site_tests/network_WiFi_DTIMPeriod/control.wifi_DTIM_period
+++ b/server/site_tests/network_WiFi_DTIMPeriod/control.wifi_DTIM_period
@@ -6,7 +6,7 @@
 NAME = 'network_WiFi_DTIMPeriod.wifi_DTIM_period'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo')
+ATTRIBUTES = ('suite:wifi_matfunc')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_DisableEnable/control b/server/site_tests/network_WiFi_DisableEnable/control
index 8135fc4..bc30732 100644
--- a/server/site_tests/network_WiFi_DisableEnable/control
+++ b/server/site_tests/network_WiFi_DisableEnable/control
@@ -6,7 +6,7 @@
 NAME = 'network_WiFi_DisableEnable'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo,'
+ATTRIBUTES = ('suite:wifi_matfunc,'
               'suite:wificell-pre-cq')
 DEPENDENCIES = 'wificell'
 
diff --git a/server/site_tests/network_WiFi_DisableRandomMACAddress/control b/server/site_tests/network_WiFi_DisableRandomMACAddress/control
index 8dc735c..f011afc 100644
--- a/server/site_tests/network_WiFi_DisableRandomMACAddress/control
+++ b/server/site_tests/network_WiFi_DisableRandomMACAddress/control
@@ -7,7 +7,7 @@
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
 DEPENDENCIES = 'wificell'
-ATTRIBUTES = 'suite:wifi_matfunc, suite:wifi_matfunc_noservo'
+ATTRIBUTES = 'suite:wifi_matfunc'
 
 DOC = """
 This test verifies that MAC address randomization can be
diff --git a/server/site_tests/network_WiFi_DisconnectClearsIP/control b/server/site_tests/network_WiFi_DisconnectClearsIP/control
index b4672d0..2d00f2a 100644
--- a/server/site_tests/network_WiFi_DisconnectClearsIP/control
+++ b/server/site_tests/network_WiFi_DisconnectClearsIP/control
@@ -6,7 +6,7 @@
 NAME = 'network_WiFi_DisconnectClearsIP'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo')
+ATTRIBUTES = ('suite:wifi_matfunc')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_DisconnectReason/control.ap_gone b/server/site_tests/network_WiFi_DisconnectReason/control.ap_gone
index 4e878ef..4d7fb29 100644
--- a/server/site_tests/network_WiFi_DisconnectReason/control.ap_gone
+++ b/server/site_tests/network_WiFi_DisconnectReason/control.ap_gone
@@ -6,7 +6,7 @@
 NAME = 'network_WiFi_DisconnectReason.ap_gone'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo')
+ATTRIBUTES = ('suite:wifi_matfunc')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_DisconnectReason/control.ap_send_chan_switch b/server/site_tests/network_WiFi_DisconnectReason/control.ap_send_chan_switch
index 02b6a23..80f2ad2 100644
--- a/server/site_tests/network_WiFi_DisconnectReason/control.ap_send_chan_switch
+++ b/server/site_tests/network_WiFi_DisconnectReason/control.ap_send_chan_switch
@@ -13,8 +13,7 @@
 #       Removing from all test suites for now.
 
 ATTRIBUTES = (
-#             'suite:wifi_matfunc, '
-#             'suite:wifi_matfunc_noservo'
+#             'suite:wifi_matfunc'
 )
 DEPENDENCIES = 'wificell'
 
diff --git a/server/site_tests/network_WiFi_DisconnectReason/control.deauth_client b/server/site_tests/network_WiFi_DisconnectReason/control.deauth_client
index 91ccda3..09d06f8 100644
--- a/server/site_tests/network_WiFi_DisconnectReason/control.deauth_client
+++ b/server/site_tests/network_WiFi_DisconnectReason/control.deauth_client
@@ -6,7 +6,7 @@
 NAME = 'network_WiFi_DisconnectReason.deauth_client'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo')
+ATTRIBUTES = ('suite:wifi_matfunc')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_DisconnectReason/control.disable_client_wifi b/server/site_tests/network_WiFi_DisconnectReason/control.disable_client_wifi
index 1a4beb8..062bf64 100644
--- a/server/site_tests/network_WiFi_DisconnectReason/control.disable_client_wifi
+++ b/server/site_tests/network_WiFi_DisconnectReason/control.disable_client_wifi
@@ -6,7 +6,7 @@
 NAME = 'network_WiFi_DisconnectReason.disable_client_wifi'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo')
+ATTRIBUTES = ('suite:wifi_matfunc')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_DisconnectReason/control.switch_ap b/server/site_tests/network_WiFi_DisconnectReason/control.switch_ap
index 49cbb79..bd83c3e 100644
--- a/server/site_tests/network_WiFi_DisconnectReason/control.switch_ap
+++ b/server/site_tests/network_WiFi_DisconnectReason/control.switch_ap
@@ -6,7 +6,7 @@
 NAME = 'network_WiFi_DisconnectReason.switch_ap'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo')
+ATTRIBUTES = ('suite:wifi_matfunc')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_GTK/control b/server/site_tests/network_WiFi_GTK/control
index 6a1b22b..7e5af16 100644
--- a/server/site_tests/network_WiFi_GTK/control
+++ b/server/site_tests/network_WiFi_GTK/control
@@ -6,7 +6,7 @@
 NAME = 'network_WiFi_GTK'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo')
+ATTRIBUTES = ('suite:wifi_matfunc')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_HiddenRemains/control b/server/site_tests/network_WiFi_HiddenRemains/control
index 3ce2914..db62bfd 100644
--- a/server/site_tests/network_WiFi_HiddenRemains/control
+++ b/server/site_tests/network_WiFi_HiddenRemains/control
@@ -6,7 +6,7 @@
 NAME = 'network_WiFi_HiddenRemains'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo')
+ATTRIBUTES = ('suite:wifi_matfunc')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_HiddenScan/control b/server/site_tests/network_WiFi_HiddenScan/control
index bf24adc..7a48cc7 100644
--- a/server/site_tests/network_WiFi_HiddenScan/control
+++ b/server/site_tests/network_WiFi_HiddenScan/control
@@ -6,7 +6,7 @@
 NAME = 'network_WiFi_HiddenScan'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo')
+ATTRIBUTES = ('suite:wifi_matfunc')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_IBSS/control.wifi_IBSS b/server/site_tests/network_WiFi_IBSS/control.wifi_IBSS
index a8e9c23..7fad1ff 100644
--- a/server/site_tests/network_WiFi_IBSS/control.wifi_IBSS
+++ b/server/site_tests/network_WiFi_IBSS/control.wifi_IBSS
@@ -6,7 +6,7 @@
 NAME = 'network_WiFi_IBSS.wifi_IBSS'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo')
+ATTRIBUTES = ('suite:wifi_matfunc')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_LinkMonitorFailure/control b/server/site_tests/network_WiFi_LinkMonitorFailure/control
index ef66ecf..cc93899 100644
--- a/server/site_tests/network_WiFi_LinkMonitorFailure/control
+++ b/server/site_tests/network_WiFi_LinkMonitorFailure/control
@@ -6,7 +6,7 @@
 NAME = 'network_WiFi_LinkMonitorFailure'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = "suite:wifi_matfunc, suite:wifi_matfunc_noservo"
+ATTRIBUTES = ('suite:wifi_matfunc')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_LowInitialBitrates/control b/server/site_tests/network_WiFi_LowInitialBitrates/control
index c045670..88a5510 100644
--- a/server/site_tests/network_WiFi_LowInitialBitrates/control
+++ b/server/site_tests/network_WiFi_LowInitialBitrates/control
@@ -6,7 +6,7 @@
 NAME = 'network_WiFi_LowInitialBitrates'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo')
+ATTRIBUTES = ('suite:wifi_matfunc')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_MalformedProbeResp/control b/server/site_tests/network_WiFi_MalformedProbeResp/control
index 92d68ff..14205d6 100644
--- a/server/site_tests/network_WiFi_MalformedProbeResp/control
+++ b/server/site_tests/network_WiFi_MalformedProbeResp/control
@@ -6,7 +6,7 @@
 NAME = 'network_WiFi_MalformedProbeResp'
 TIME = 'MEDIUM'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo')
+ATTRIBUTES = ('suite:wifi_matfunc')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_MaskedBSSID/control.wifi_masked_bssid b/server/site_tests/network_WiFi_MaskedBSSID/control.wifi_masked_bssid
index 692bbe8..af9d2e6 100644
--- a/server/site_tests/network_WiFi_MaskedBSSID/control.wifi_masked_bssid
+++ b/server/site_tests/network_WiFi_MaskedBSSID/control.wifi_masked_bssid
@@ -6,7 +6,7 @@
 NAME = 'network_WiFi_MaskedBSSID.wifi_masked_bssid'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo')
+ATTRIBUTES = ('suite:wifi_matfunc')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_MissingBeacons/control b/server/site_tests/network_WiFi_MissingBeacons/control
index 382cc38..bd48b1f 100644
--- a/server/site_tests/network_WiFi_MissingBeacons/control
+++ b/server/site_tests/network_WiFi_MissingBeacons/control
@@ -6,7 +6,7 @@
 NAME = 'network_WiFi_MissingBeacons'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo')
+ATTRIBUTES = ('suite:wifi_matfunc')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_MultiAuth/control b/server/site_tests/network_WiFi_MultiAuth/control
index 7cf7c92..be52bff 100644
--- a/server/site_tests/network_WiFi_MultiAuth/control
+++ b/server/site_tests/network_WiFi_MultiAuth/control
@@ -6,7 +6,7 @@
 NAME = 'network_WiFi_MultiAuth'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo')
+ATTRIBUTES = ('suite:wifi_matfunc')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_OverlappingBSSScan/control b/server/site_tests/network_WiFi_OverlappingBSSScan/control
index cd61626..252b211 100644
--- a/server/site_tests/network_WiFi_OverlappingBSSScan/control
+++ b/server/site_tests/network_WiFi_OverlappingBSSScan/control
@@ -6,7 +6,7 @@
 NAME = 'network_WiFi_OverlappingBSSScan'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo')
+ATTRIBUTES = ('suite:wifi_matfunc')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_PMKSACaching/control b/server/site_tests/network_WiFi_PMKSACaching/control
index 1b53188..20e5540 100644
--- a/server/site_tests/network_WiFi_PMKSACaching/control
+++ b/server/site_tests/network_WiFi_PMKSACaching/control
@@ -6,7 +6,7 @@
 NAME = 'network_WiFi_PMKSACaching'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo')
+ATTRIBUTES = ('suite:wifi_matfunc')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_PTK/control b/server/site_tests/network_WiFi_PTK/control
index 8ca701b..c6db9c5 100644
--- a/server/site_tests/network_WiFi_PTK/control
+++ b/server/site_tests/network_WiFi_PTK/control
@@ -6,7 +6,7 @@
 NAME = 'network_WiFi_PTK'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo')
+ATTRIBUTES = ('suite:wifi_matfunc')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_Perf/control.11g b/server/site_tests/network_WiFi_Perf/control.11g
index 8165d41..3b0e5ce 100644
--- a/server/site_tests/network_WiFi_Perf/control.11g
+++ b/server/site_tests/network_WiFi_Perf/control.11g
@@ -6,6 +6,7 @@
 NAME = 'network_WiFi_Perf.11g'
 ATTRIBUTES = 'suite:wifi_perf'
 TIME = 'SHORT'
+MAX_RESULT_SIZE_KB = 512000
 TEST_TYPE = 'Server'
 DEPENDENCIES = 'wificell'
 
diff --git a/server/site_tests/network_WiFi_Perf/control.11g_aes b/server/site_tests/network_WiFi_Perf/control.11g_aes
index 39603b3..9440f4d 100644
--- a/server/site_tests/network_WiFi_Perf/control.11g_aes
+++ b/server/site_tests/network_WiFi_Perf/control.11g_aes
@@ -6,6 +6,7 @@
 NAME = 'network_WiFi_Perf.11g_aes'
 ATTRIBUTES = 'suite:wifi_perf'
 TIME = 'SHORT'
+MAX_RESULT_SIZE_KB = 512000
 TEST_TYPE = 'Server'
 DEPENDENCIES = 'wificell'
 
diff --git a/server/site_tests/network_WiFi_Perf/control.11g_tkip b/server/site_tests/network_WiFi_Perf/control.11g_tkip
index 628d777..79b2022 100644
--- a/server/site_tests/network_WiFi_Perf/control.11g_tkip
+++ b/server/site_tests/network_WiFi_Perf/control.11g_tkip
@@ -6,6 +6,7 @@
 NAME = 'network_WiFi_Perf.11g_tkip'
 ATTRIBUTES = 'suite:wifi_perf'
 TIME = 'SHORT'
+MAX_RESULT_SIZE_KB = 512000
 TEST_TYPE = 'Server'
 DEPENDENCIES = 'wificell'
 
diff --git a/server/site_tests/network_WiFi_Perf/control.11g_wep b/server/site_tests/network_WiFi_Perf/control.11g_wep
index dea442e..e9c6ecb 100644
--- a/server/site_tests/network_WiFi_Perf/control.11g_wep
+++ b/server/site_tests/network_WiFi_Perf/control.11g_wep
@@ -6,6 +6,7 @@
 NAME = 'network_WiFi_Perf.11g_wep'
 ATTRIBUTES = 'suite:wifi_perf'
 TIME = 'SHORT'
+MAX_RESULT_SIZE_KB = 512000
 TEST_TYPE = 'Server'
 DEPENDENCIES = 'wificell'
 
diff --git a/server/site_tests/network_WiFi_Perf/control.ht20 b/server/site_tests/network_WiFi_Perf/control.ht20
index 2f7289e..2437840 100644
--- a/server/site_tests/network_WiFi_Perf/control.ht20
+++ b/server/site_tests/network_WiFi_Perf/control.ht20
@@ -6,6 +6,7 @@
 NAME = 'network_WiFi_Perf.ht20'
 ATTRIBUTES = 'suite:wifi_perf'
 TIME = 'SHORT'
+MAX_RESULT_SIZE_KB = 512000
 TEST_TYPE = 'Server'
 DEPENDENCIES = 'wificell'
 
diff --git a/server/site_tests/network_WiFi_Perf/control.ht20_aes b/server/site_tests/network_WiFi_Perf/control.ht20_aes
index 4c53a87..0b54385 100644
--- a/server/site_tests/network_WiFi_Perf/control.ht20_aes
+++ b/server/site_tests/network_WiFi_Perf/control.ht20_aes
@@ -6,6 +6,7 @@
 NAME = 'network_WiFi_Perf.ht20_aes'
 ATTRIBUTES = 'suite:wifi_perf'
 TIME = 'SHORT'
+MAX_RESULT_SIZE_KB = 512000
 TEST_TYPE = 'Server'
 DEPENDENCIES = 'wificell'
 
diff --git a/server/site_tests/network_WiFi_Perf/control.ht40 b/server/site_tests/network_WiFi_Perf/control.ht40
index 7d114a4..477ee83 100644
--- a/server/site_tests/network_WiFi_Perf/control.ht40
+++ b/server/site_tests/network_WiFi_Perf/control.ht40
@@ -6,6 +6,7 @@
 NAME = 'network_WiFi_Perf.ht40'
 ATTRIBUTES = 'suite:wifi_perf'
 TIME = 'SHORT'
+MAX_RESULT_SIZE_KB = 512000
 TEST_TYPE = 'Server'
 DEPENDENCIES = 'wificell'
 
diff --git a/server/site_tests/network_WiFi_Perf/control.ht40_aes b/server/site_tests/network_WiFi_Perf/control.ht40_aes
index 2ebf3e4..ef5e536 100644
--- a/server/site_tests/network_WiFi_Perf/control.ht40_aes
+++ b/server/site_tests/network_WiFi_Perf/control.ht40_aes
@@ -6,6 +6,7 @@
 NAME = 'network_WiFi_Perf.ht40_aes'
 ATTRIBUTES = 'suite:wifi_perf'
 TIME = 'SHORT'
+MAX_RESULT_SIZE_KB = 512000
 TEST_TYPE = 'Server'
 DEPENDENCIES = 'wificell'
 
diff --git a/server/site_tests/network_WiFi_Perf/control.vht80 b/server/site_tests/network_WiFi_Perf/control.vht80
index 9f7dace..e5773a7 100644
--- a/server/site_tests/network_WiFi_Perf/control.vht80
+++ b/server/site_tests/network_WiFi_Perf/control.vht80
@@ -6,6 +6,7 @@
 NAME = 'network_WiFi_Perf.vht80'
 ATTRIBUTES = 'suite:wifi_perf'
 TIME = 'SHORT'
+MAX_RESULT_SIZE_KB = 512000
 TEST_TYPE = 'Server'
 DEPENDENCIES = 'wificell'
 
diff --git a/server/site_tests/network_WiFi_Powersave/control.wifi_ps b/server/site_tests/network_WiFi_Powersave/control.wifi_ps
index d8574b6..d82dca8 100644
--- a/server/site_tests/network_WiFi_Powersave/control.wifi_ps
+++ b/server/site_tests/network_WiFi_Powersave/control.wifi_ps
@@ -6,7 +6,7 @@
 NAME = 'network_WiFi_Powersave.wifi_ps'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo')
+ATTRIBUTES = ('suite:wifi_matfunc')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_Prefer5Ghz/control b/server/site_tests/network_WiFi_Prefer5Ghz/control
index 98ad852..0e139ef 100644
--- a/server/site_tests/network_WiFi_Prefer5Ghz/control
+++ b/server/site_tests/network_WiFi_Prefer5Ghz/control
@@ -6,7 +6,7 @@
 NAME = 'network_WiFi_Prefer5Ghz'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo')
+ATTRIBUTES = ('suite:wifi_matfunc')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_ProfileBasic/control b/server/site_tests/network_WiFi_ProfileBasic/control
index 1c67a1b..dc4497e 100644
--- a/server/site_tests/network_WiFi_ProfileBasic/control
+++ b/server/site_tests/network_WiFi_ProfileBasic/control
@@ -6,8 +6,7 @@
 NAME = 'network_WiFi_ProfileBasic'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo,'
-              'suite:wificell-pre-cq')
+ATTRIBUTES = ('suite:wifi_matfunc, suite:wificell-pre-cq')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_ProfileGUID/control b/server/site_tests/network_WiFi_ProfileGUID/control
index 1b82816..7d02584 100644
--- a/server/site_tests/network_WiFi_ProfileGUID/control
+++ b/server/site_tests/network_WiFi_ProfileGUID/control
@@ -6,7 +6,7 @@
 NAME = 'network_WiFi_ProfileGUID'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo')
+ATTRIBUTES = ('suite:wifi_matfunc')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_RandomMACAddress/control b/server/site_tests/network_WiFi_RandomMACAddress/control
index de159df..f5ed2b7 100644
--- a/server/site_tests/network_WiFi_RandomMACAddress/control
+++ b/server/site_tests/network_WiFi_RandomMACAddress/control
@@ -9,7 +9,7 @@
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
 DEPENDENCIES = 'wificell'
-ATTRIBUTES = 'suite:wifi_matfunc, suite:wifi_matfunc_noservo'
+ATTRIBUTES = ('suite:wifi_matfunc')
 
 DOC = """
 This test verifies that MAC address randomization can be
diff --git a/server/site_tests/network_WiFi_Reassociate/control b/server/site_tests/network_WiFi_Reassociate/control
index 7367c08..70e7cca 100644
--- a/server/site_tests/network_WiFi_Reassociate/control
+++ b/server/site_tests/network_WiFi_Reassociate/control
@@ -6,7 +6,7 @@
 NAME = 'network_WiFi_Reassociate'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo')
+ATTRIBUTES = ('suite:wifi_matfunc')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_Regulatory/control b/server/site_tests/network_WiFi_Regulatory/control
index 33c019b..ce10d97 100644
--- a/server/site_tests/network_WiFi_Regulatory/control
+++ b/server/site_tests/network_WiFi_Regulatory/control
@@ -6,7 +6,7 @@
 NAME = 'network_WiFi_Regulatory'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo')
+ATTRIBUTES = ('suite:wifi_matfunc')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_Reset/control b/server/site_tests/network_WiFi_Reset/control
index 8dbb35c..e0a44e8 100644
--- a/server/site_tests/network_WiFi_Reset/control
+++ b/server/site_tests/network_WiFi_Reset/control
@@ -5,9 +5,10 @@
 AUTHOR = 'briannorris'
 NAME = 'network_WiFi_Reset'
 TIME = 'SHORT'
+MAX_RESULT_SIZE_KB = 512000
 TEST_TYPE = 'Server'
 DEPENDENCIES = 'wificell'
-ATTRIBUTES = 'suite:wifi_matfunc, suite:wifi_matfunc_noservo'
+ATTRIBUTES = ('suite:wifi_matfunc')
 
 DOC = """
 This test verifies that if a network device can be reset from user space, that
diff --git a/server/site_tests/network_WiFi_Roam/control.wifi_roam1xTLS b/server/site_tests/network_WiFi_Roam/control.wifi_roam1xTLS
index 99ce60c..afeea01 100644
--- a/server/site_tests/network_WiFi_Roam/control.wifi_roam1xTLS
+++ b/server/site_tests/network_WiFi_Roam/control.wifi_roam1xTLS
@@ -6,7 +6,7 @@
 NAME = 'network_WiFi_Roam.wifi_roam1xTLS'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo')
+ATTRIBUTES = ('suite:wifi_matfunc')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_Roam/control.wifi_roamNone b/server/site_tests/network_WiFi_Roam/control.wifi_roamNone
index fe53aac..c89054a 100644
--- a/server/site_tests/network_WiFi_Roam/control.wifi_roamNone
+++ b/server/site_tests/network_WiFi_Roam/control.wifi_roamNone
@@ -6,7 +6,7 @@
 NAME = 'network_WiFi_Roam.wifi_roamNone'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo')
+ATTRIBUTES = ('suite:wifi_matfunc')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_Roam/control.wifi_roamWEP b/server/site_tests/network_WiFi_Roam/control.wifi_roamWEP
index 864774d..26369f7 100644
--- a/server/site_tests/network_WiFi_Roam/control.wifi_roamWEP
+++ b/server/site_tests/network_WiFi_Roam/control.wifi_roamWEP
@@ -6,7 +6,7 @@
 NAME = 'network_WiFi_Roam.wifi_roamWEP'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo')
+ATTRIBUTES = ('suite:wifi_matfunc')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_Roam/control.wifi_roamWPA b/server/site_tests/network_WiFi_Roam/control.wifi_roamWPA
index d7db5c2..d428a3f 100644
--- a/server/site_tests/network_WiFi_Roam/control.wifi_roamWPA
+++ b/server/site_tests/network_WiFi_Roam/control.wifi_roamWPA
@@ -6,7 +6,7 @@
 NAME = 'network_WiFi_Roam.wifi_roamWPA'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo')
+ATTRIBUTES = ('suite:wifi_matfunc')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_RoamDbus/control b/server/site_tests/network_WiFi_RoamDbus/control
index 61b6fd7..9c6e867 100644
--- a/server/site_tests/network_WiFi_RoamDbus/control
+++ b/server/site_tests/network_WiFi_RoamDbus/control
@@ -6,7 +6,7 @@
 NAME = 'network_WiFi_RoamDbus'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo')
+ATTRIBUTES = ('suite:wifi_matfunc')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_RoamFT/control.EAP b/server/site_tests/network_WiFi_RoamFT/control.EAP
index 84fa2d8..34c7cb5 100644
--- a/server/site_tests/network_WiFi_RoamFT/control.EAP
+++ b/server/site_tests/network_WiFi_RoamFT/control.EAP
@@ -6,7 +6,7 @@
 TIME = 'SHORT'
 NAME = 'network_WiFi_RoamFT.EAP'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo')
+ATTRIBUTES = ('suite:wifi_matfunc')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_RoamFT/control.PSK b/server/site_tests/network_WiFi_RoamFT/control.PSK
index 40e3898..30838e1 100644
--- a/server/site_tests/network_WiFi_RoamFT/control.PSK
+++ b/server/site_tests/network_WiFi_RoamFT/control.PSK
@@ -6,7 +6,7 @@
 TIME = 'SHORT'
 NAME = 'network_WiFi_RoamFT.PSK'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo')
+ATTRIBUTES = ('suite:wifi_matfunc')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_RoamFT/control.mixedEAP b/server/site_tests/network_WiFi_RoamFT/control.mixedEAP
index 90eaf67..20285d4 100644
--- a/server/site_tests/network_WiFi_RoamFT/control.mixedEAP
+++ b/server/site_tests/network_WiFi_RoamFT/control.mixedEAP
@@ -6,7 +6,7 @@
 TIME = 'SHORT'
 NAME = 'network_WiFi_RoamFT.mixedEAP'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo')
+ATTRIBUTES = ('suite:wifi_matfunc')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_RoamFT/control.mixedPSK b/server/site_tests/network_WiFi_RoamFT/control.mixedPSK
index 3199078..9c76620 100644
--- a/server/site_tests/network_WiFi_RoamFT/control.mixedPSK
+++ b/server/site_tests/network_WiFi_RoamFT/control.mixedPSK
@@ -6,7 +6,7 @@
 TIME = 'SHORT'
 NAME = 'network_WiFi_RoamFT.mixedPSK'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo')
+ATTRIBUTES = ('suite:wifi_matfunc')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_RxFrag/control.wifi_rxfrag b/server/site_tests/network_WiFi_RxFrag/control.wifi_rxfrag
index 8ea8411..79431d6 100644
--- a/server/site_tests/network_WiFi_RxFrag/control.wifi_rxfrag
+++ b/server/site_tests/network_WiFi_RxFrag/control.wifi_rxfrag
@@ -6,7 +6,7 @@
 NAME = 'network_WiFi_RxFrag.wifi_rxfrag'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo')
+ATTRIBUTES = ('suite:wifi_matfunc')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_ScanPerformance/control b/server/site_tests/network_WiFi_ScanPerformance/control
index ae8c3df..092caeb 100644
--- a/server/site_tests/network_WiFi_ScanPerformance/control
+++ b/server/site_tests/network_WiFi_ScanPerformance/control
@@ -6,7 +6,7 @@
 NAME = 'network_WiFi_ScanPerformance'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo')
+ATTRIBUTES = ('suite:wifi_matfunc')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_SecChange/control b/server/site_tests/network_WiFi_SecChange/control
index 5869e7f..9e14704 100644
--- a/server/site_tests/network_WiFi_SecChange/control
+++ b/server/site_tests/network_WiFi_SecChange/control
@@ -6,7 +6,7 @@
 NAME = 'network_WiFi_SecChange'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo')
+ATTRIBUTES = ('suite:wifi_matfunc')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_SetOptionalDhcpProperties/control b/server/site_tests/network_WiFi_SetOptionalDhcpProperties/control
index d130987..540abc0 100644
--- a/server/site_tests/network_WiFi_SetOptionalDhcpProperties/control
+++ b/server/site_tests/network_WiFi_SetOptionalDhcpProperties/control
@@ -6,7 +6,7 @@
 NAME = 'network_WiFi_SetOptionalDhcpProperties'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo')
+ATTRIBUTES = ('suite:wifi_matfunc')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check11a b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check11a
index 07ea339..e0545cf 100644
--- a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check11a
+++ b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check11a
@@ -6,8 +6,7 @@
 NAME = 'network_WiFi_SimpleConnect.wifi_check11a'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo,'
-              'suite:wifi_release, subsystem:wifi')
+ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_release, subsystem:wifi')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check11b b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check11b
index a3b017b..c6143a7 100644
--- a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check11b
+++ b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check11b
@@ -6,8 +6,7 @@
 NAME = 'network_WiFi_SimpleConnect.wifi_check11b'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo,'
-              'suite:wifi_release, subsystem:wifi')
+ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_release, subsystem:wifi')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check11g b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check11g
index 073d5d3..06dcd04 100644
--- a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check11g
+++ b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check11g
@@ -6,8 +6,8 @@
 NAME = 'network_WiFi_SimpleConnect.wifi_check11g'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo,'
-              'suite:wifi_release, subsystem:wifi, suite:wificell-pre-cq')
+ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_release,'
+              'subsystem:wifi, suite:wificell-pre-cq')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check1x_PEAP b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check1x_PEAP
index f3db683..1735a65 100644
--- a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check1x_PEAP
+++ b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check1x_PEAP
@@ -6,8 +6,7 @@
 NAME = 'network_WiFi_SimpleConnect.wifi_check1x_PEAP'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo,'
-              'suite:wifi_release, subsystem:wifi')
+ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_release, subsystem:wifi')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check1x_TTLS b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check1x_TTLS
index 204b7dc..0e37918 100644
--- a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check1x_TTLS
+++ b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check1x_TTLS
@@ -6,8 +6,8 @@
 NAME = 'network_WiFi_SimpleConnect.wifi_check1x_TTLS'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo,'
-              'suite:wifi_release, subsystem:wifi, suite:wificell-pre-cq')
+ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_release,'
+              'subsystem:wifi, suite:wificell-pre-cq')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check1x_WEP b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check1x_WEP
index 5ec6473..a9397b0 100644
--- a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check1x_WEP
+++ b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check1x_WEP
@@ -6,8 +6,7 @@
 NAME = 'network_WiFi_SimpleConnect.wifi_check1x_WEP'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo,'
-              'suite:wifi_release, subsystem:wifi')
+ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_release, subsystem:wifi')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check1x_WPA b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check1x_WPA
index 87b1629..6b65ba1 100644
--- a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check1x_WPA
+++ b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check1x_WPA
@@ -6,8 +6,7 @@
 NAME = 'network_WiFi_SimpleConnect.wifi_check1x_WPA'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo,'
-              'suite:wifi_release, subsystem:wifi')
+ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_release, subsystem:wifi')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check24HT20 b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check24HT20
index 2654c1f..32734d9 100644
--- a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check24HT20
+++ b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check24HT20
@@ -6,8 +6,7 @@
 NAME = 'network_WiFi_SimpleConnect.wifi_check24HT20'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo,'
-              'suite:wifi_release, subsystem:wifi')
+ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_release, subsystem:wifi')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check24HT40 b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check24HT40
index 59b0a3b..c37b4fe 100644
--- a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check24HT40
+++ b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check24HT40
@@ -6,8 +6,7 @@
 NAME = 'network_WiFi_SimpleConnect.wifi_check24HT40'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo,'
-              'suite:wifi_release, subsystem:wifi')
+ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_release, subsystem:wifi')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check5HT20 b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check5HT20
index ba72ef0..c4cccd5 100644
--- a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check5HT20
+++ b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check5HT20
@@ -6,8 +6,7 @@
 NAME = 'network_WiFi_SimpleConnect.wifi_check5HT20'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo,'
-              'suite:wifi_release, subsystem:wifi')
+ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_release, subsystem:wifi')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check5HT40 b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check5HT40
index 3f7d03f..ddacda3 100644
--- a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check5HT40
+++ b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check5HT40
@@ -6,8 +6,7 @@
 NAME = 'network_WiFi_SimpleConnect.wifi_check5HT40'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo,'
-              'suite:wifi_release, subsystem:wifi')
+ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_release, subsystem:wifi')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check5VHT80_mixed b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check5VHT80_mixed
index de1cce7..65994e5 100644
--- a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check5VHT80_mixed
+++ b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check5VHT80_mixed
@@ -6,8 +6,7 @@
 NAME = 'network_WiFi_SimpleConnect.wifi_check5VHT80_mixed'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo,'
-              'suite:wifi_release, subsystem:wifi')
+ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_release, subsystem:wifi')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check5VHT80_pure b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check5VHT80_pure
index 0efad46..59ee263 100644
--- a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check5VHT80_pure
+++ b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check5VHT80_pure
@@ -6,8 +6,7 @@
 NAME = 'network_WiFi_SimpleConnect.wifi_check5VHT80_pure'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo,'
-              'suite:wifi_release, subsystem:wifi')
+ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_release, subsystem:wifi')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkDFS b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkDFS
index 37453e3..e535c1c 100644
--- a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkDFS
+++ b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkDFS
@@ -6,8 +6,7 @@
 NAME = 'network_WiFi_SimpleConnect.wifi_checkDFS'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo,'
-              'suite:wifi_release, subsystem:wifi')
+ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_release, subsystem:wifi')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkHidden b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkHidden
index 558b6da..3da1368 100644
--- a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkHidden
+++ b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkHidden
@@ -6,8 +6,7 @@
 NAME = 'network_WiFi_SimpleConnect.wifi_checkHidden'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo,'
-              'suite:wifi_release, subsystem:wifi')
+ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_release, subsystem:wifi')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkHiddenWEP b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkHiddenWEP
index 3c61509..06a056b 100644
--- a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkHiddenWEP
+++ b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkHiddenWEP
@@ -7,8 +7,7 @@
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
 ATTRIBUTES = ('suite:wifi_flaky, suite:wifi_matfunc,'
-              'suite:wifi_matfunc_noservo, suite:wifi_release,'
-	      'subsystem:wifi')
+              'suite:wifi_release, subsystem:wifi')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkHiddenWPA b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkHiddenWPA
index 762dcf5..a84a779 100644
--- a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkHiddenWPA
+++ b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkHiddenWPA
@@ -6,8 +6,7 @@
 NAME = 'network_WiFi_SimpleConnect.wifi_checkHiddenWPA'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo,'
-              'suite:wifi_release, subsystem:wifi')
+ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_release, subsystem:wifi')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkMixedWPA b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkMixedWPA
index 61c1d8c..ad376bb 100644
--- a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkMixedWPA
+++ b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkMixedWPA
@@ -6,8 +6,7 @@
 NAME = 'network_WiFi_SimpleConnect.wifi_checkMixedWPA'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo,'
-              'suite:wifi_release, subsystem:wifi')
+ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_release, subsystem:wifi')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkNonAsciiSSID b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkNonAsciiSSID
index 2e56c87..b011468 100644
--- a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkNonAsciiSSID
+++ b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkNonAsciiSSID
@@ -6,8 +6,7 @@
 NAME = 'network_WiFi_SimpleConnect.wifi_checkNonAsciiSSID'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo,'
-              'suite:wifi_release, subsystem:wifi')
+ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_release, subsystem:wifi')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkOddWPAPassphrases b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkOddWPAPassphrases
index cdfb3f9..14935d6 100644
--- a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkOddWPAPassphrases
+++ b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkOddWPAPassphrases
@@ -6,8 +6,7 @@
 NAME = 'network_WiFi_SimpleConnect.wifi_checkOddWPAPassphrases'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo,'
-              'suite:wifi_release, subsystem:wifi')
+ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_release, subsystem:wifi')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkRawPMK b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkRawPMK
index 1837f85..ec402b4 100644
--- a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkRawPMK
+++ b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkRawPMK
@@ -6,8 +6,7 @@
 NAME = 'network_WiFi_SimpleConnect.wifi_checkRawPMK'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo,'
-              'suite:wifi_release, subsystem:wifi')
+ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_release, subsystem:wifi')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkSSIDLimits b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkSSIDLimits
index fbf9d81..83674bf 100644
--- a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkSSIDLimits
+++ b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkSSIDLimits
@@ -6,8 +6,7 @@
 NAME = 'network_WiFi_SimpleConnect.wifi_checkSSIDLimits'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo,'
-              'suite:wifi_release, subsystem:wifi')
+ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_release, subsystem:wifi')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkTruncatedBeacon b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkTruncatedBeacon
index ae0c78e..8cc8cec 100644
--- a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkTruncatedBeacon
+++ b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkTruncatedBeacon
@@ -6,8 +6,7 @@
 NAME = 'network_WiFi_SimpleConnect.wifi_checkTruncatedBeacon'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo,'
-              'suite:wifi_release, subsystem:wifi')
+ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_release, subsystem:wifi')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWEP104 b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWEP104
index fbc3f09..9c315c6 100644
--- a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWEP104
+++ b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWEP104
@@ -6,8 +6,7 @@
 NAME = 'network_WiFi_SimpleConnect.wifi_checkWEP104'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo,'
-              'suite:wifi_release, subsystem:wifi')
+ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_release, subsystem:wifi')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWEP40 b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWEP40
index ea699f9..01f08c7 100644
--- a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWEP40
+++ b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWEP40
@@ -6,8 +6,7 @@
 NAME = 'network_WiFi_SimpleConnect.wifi_checkWEP40'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo,'
-              'suite:wifi_release, subsystem:wifi')
+ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_release, subsystem:wifi')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWPA2 b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWPA2
index 90e6c75..b6b027d 100644
--- a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWPA2
+++ b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWPA2
@@ -6,8 +6,7 @@
 NAME = 'network_WiFi_SimpleConnect.wifi_checkWPA2'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo,'
-              'suite:wifi_release, subsystem:wifi')
+ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_release, subsystem:wifi')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWPA2_PMF b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWPA2_PMF
index a81bfe3..adcf58d 100644
--- a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWPA2_PMF
+++ b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWPA2_PMF
@@ -6,8 +6,7 @@
 NAME = 'network_WiFi_SimpleConnect.wifi_checkWPA2_CCMP_PMF'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo,'
-              'suite:wifi_release, subsystem:wifi')
+ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_release, subsystem:wifi')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWPA2_TKIP b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWPA2_TKIP
index 36fcc3b..e0d08c0 100644
--- a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWPA2_TKIP
+++ b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWPA2_TKIP
@@ -6,8 +6,7 @@
 NAME = 'network_WiFi_SimpleConnect.wifi_checkWPA2_TKIP'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo,'
-              'suite:wifi_release, subsystem:wifi')
+ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_release, subsystem:wifi')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWPA_CCMP b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWPA_CCMP
index a54c059..a0af852 100644
--- a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWPA_CCMP
+++ b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWPA_CCMP
@@ -6,8 +6,7 @@
 NAME = 'network_WiFi_SimpleConnect.wifi_checkWPA_CCMP'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo,'
-              'suite:wifi_release, subsystem:wifi')
+ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_release, subsystem:wifi')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWPA_TKIP b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWPA_TKIP
index c3060f1..24ee762 100644
--- a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWPA_TKIP
+++ b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWPA_TKIP
@@ -6,8 +6,7 @@
 NAME = 'network_WiFi_SimpleConnect.wifi_checkWPA_TKIP'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo,'
-              'suite:wifi_release, subsystem:wifi')
+ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_release, subsystem:wifi')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWPA_multi b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWPA_multi
index 73e9023..d5fced1 100644
--- a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWPA_multi
+++ b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWPA_multi
@@ -6,8 +6,7 @@
 NAME = 'network_WiFi_SimpleConnect.wifi_checkWPA_multi'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo,'
-              'suite:wifi_release, subsystem:wifi')
+ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_release, subsystem:wifi')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_SuspendStress/control.11g b/server/site_tests/network_WiFi_SuspendStress/control.11g
index 5ec21fa..e128657 100644
--- a/server/site_tests/network_WiFi_SuspendStress/control.11g
+++ b/server/site_tests/network_WiFi_SuspendStress/control.11g
@@ -8,7 +8,7 @@
 MAX_RESULT_SIZE_KB = 512000
 TEST_TYPE = 'Server'
 DEPENDENCIES = 'wificell'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo')
+ATTRIBUTES = ('suite:wifi_matfunc')
 
 DOC = """
 This test uses powerd_dbus to suspend and resume and checks that the
diff --git a/server/site_tests/network_WiFi_SuspendStress/control.24HT40 b/server/site_tests/network_WiFi_SuspendStress/control.24HT40
index af96f6d..acbc7a69 100644
--- a/server/site_tests/network_WiFi_SuspendStress/control.24HT40
+++ b/server/site_tests/network_WiFi_SuspendStress/control.24HT40
@@ -8,7 +8,7 @@
 MAX_RESULT_SIZE_KB = 512000
 TEST_TYPE = 'Server'
 DEPENDENCIES = 'wificell'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo')
+ATTRIBUTES = ('suite:wifi_matfunc')
 
 DOC = """
 This test uses powerd_dbus to suspend and resume and checks that the
diff --git a/server/site_tests/network_WiFi_SuspendStress/control.5HT40 b/server/site_tests/network_WiFi_SuspendStress/control.5HT40
index ca61b3c..f882e41 100644
--- a/server/site_tests/network_WiFi_SuspendStress/control.5HT40
+++ b/server/site_tests/network_WiFi_SuspendStress/control.5HT40
@@ -8,7 +8,7 @@
 MAX_RESULT_SIZE_KB = 512000
 TEST_TYPE = 'Server'
 DEPENDENCIES = 'wificell'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo')
+ATTRIBUTES = ('suite:wifi_matfunc')
 
 DOC = """
 This test uses powerd_dbus to suspend and resume and checks that the
diff --git a/server/site_tests/network_WiFi_SuspendStress/control.Hidden b/server/site_tests/network_WiFi_SuspendStress/control.Hidden
index b01db49..d6af8a9 100644
--- a/server/site_tests/network_WiFi_SuspendStress/control.Hidden
+++ b/server/site_tests/network_WiFi_SuspendStress/control.Hidden
@@ -8,7 +8,7 @@
 MAX_RESULT_SIZE_KB = 512000
 TEST_TYPE = 'Server'
 DEPENDENCIES = 'wificell'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo')
+ATTRIBUTES = ('suite:wifi_matfunc')
 
 DOC = """
 This test uses powerd_dbus to suspend and resume and checks that the
diff --git a/server/site_tests/network_WiFi_SuspendStress/control.VHT80 b/server/site_tests/network_WiFi_SuspendStress/control.VHT80
index df5d25d..034463d 100644
--- a/server/site_tests/network_WiFi_SuspendStress/control.VHT80
+++ b/server/site_tests/network_WiFi_SuspendStress/control.VHT80
@@ -8,7 +8,7 @@
 MAX_RESULT_SIZE_KB = 512000
 TEST_TYPE = 'Server'
 DEPENDENCIES = 'wificell'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo')
+ATTRIBUTES = ('suite:wifi_matfunc')
 
 DOC = """
 This test uses powerd_dbus to suspend and resume and checks that the
diff --git a/server/site_tests/network_WiFi_SuspendStress/control.WEP40 b/server/site_tests/network_WiFi_SuspendStress/control.WEP40
index 77653b5..8196206 100644
--- a/server/site_tests/network_WiFi_SuspendStress/control.WEP40
+++ b/server/site_tests/network_WiFi_SuspendStress/control.WEP40
@@ -8,7 +8,7 @@
 MAX_RESULT_SIZE_KB = 512000
 TEST_TYPE = 'Server'
 DEPENDENCIES = 'wificell'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo')
+ATTRIBUTES = ('suite:wifi_matfunc')
 
 DOC = """
 This test uses powerd_dbus to suspend and resume and checks that the
diff --git a/server/site_tests/network_WiFi_SuspendStress/control.WPA2 b/server/site_tests/network_WiFi_SuspendStress/control.WPA2
index 8d57aac..b7f97d7 100644
--- a/server/site_tests/network_WiFi_SuspendStress/control.WPA2
+++ b/server/site_tests/network_WiFi_SuspendStress/control.WPA2
@@ -8,7 +8,7 @@
 MAX_RESULT_SIZE_KB = 512000
 TEST_TYPE = 'Server'
 DEPENDENCIES = 'wificell'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo')
+ATTRIBUTES = ('suite:wifi_matfunc')
 
 DOC = """
 This test uses powerd_dbus to suspend and resume and checks that the
diff --git a/server/site_tests/network_WiFi_TDLSPing/control b/server/site_tests/network_WiFi_TDLSPing/control
index 3155651..7460121 100644
--- a/server/site_tests/network_WiFi_TDLSPing/control
+++ b/server/site_tests/network_WiFi_TDLSPing/control
@@ -6,7 +6,7 @@
 NAME = 'network_WiFi_TDLSPing'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo')
+ATTRIBUTES = ('suite:wifi_matfunc')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/network_WiFi_VisibleScan/control b/server/site_tests/network_WiFi_VisibleScan/control
index 8f833d4..a45398a 100644
--- a/server/site_tests/network_WiFi_VisibleScan/control
+++ b/server/site_tests/network_WiFi_VisibleScan/control
@@ -6,7 +6,7 @@
 NAME = 'network_WiFi_VisibleScan'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_matfunc_noservo')
+ATTRIBUTES = ('suite:wifi_matfunc')
 DEPENDENCIES = 'wificell'
 
 DOC = """
diff --git a/server/site_tests/platform_SuspendResumeTiming/platform_SuspendResumeTiming.py b/server/site_tests/platform_SuspendResumeTiming/platform_SuspendResumeTiming.py
index b3c70cb..56d10d7 100644
--- a/server/site_tests/platform_SuspendResumeTiming/platform_SuspendResumeTiming.py
+++ b/server/site_tests/platform_SuspendResumeTiming/platform_SuspendResumeTiming.py
@@ -20,6 +20,8 @@
 _SUSPEND_TIME = 15
 _TIME_TO_RESUME_BAR = 3
 _TIME_TO_SUSPEND_BAR = 3
+_SLEEP_AFTER_RESUME = 60
+_SLEEP_AFTER_REBOOT = 30
 
 
 class platform_SuspendResumeTiming(test.test):
@@ -114,6 +116,7 @@
 
         # Reboot to create new powerd.Latest log file.
         self.host.reboot()
+        time.sleep(_SLEEP_AFTER_REBOOT)
 
         # Test user login.
         autotest_client = autotest.Autotest(self.host)
@@ -135,6 +138,7 @@
             self.host.suspend(suspend_time=_SUSPEND_TIME)
         except error.AutoservSuspendError:
             pass
+        time.sleep(_SLEEP_AFTER_RESUME)
         self.host.run('sync')
 
 
diff --git a/server/site_tests/policy_WiFiTypesServer/control.wpa_peap_gtc b/server/site_tests/policy_WiFiTypesServer/control.wpa_peap_gtc
index 1982715..5cc1b11 100644
--- a/server/site_tests/policy_WiFiTypesServer/control.wpa_peap_gtc
+++ b/server/site_tests/policy_WiFiTypesServer/control.wpa_peap_gtc
@@ -42,7 +42,7 @@
             security_config=eap_config)
 
     network = NetworkConfig(security='WPA-EAP',
-                            eap='PEAP-GTC',
+                            eap='PEAP',
                             identity=identity,
                             password=password,
                             ca_cert=site_eap_certs.ca_cert_1)
diff --git a/server/site_tests/telemetry_AFDOGenerate/telemetry_AFDOGenerate.py b/server/site_tests/telemetry_AFDOGenerate/telemetry_AFDOGenerate.py
index aa7fc65..2dd6eb4 100644
--- a/server/site_tests/telemetry_AFDOGenerate/telemetry_AFDOGenerate.py
+++ b/server/site_tests/telemetry_AFDOGenerate/telemetry_AFDOGenerate.py
@@ -43,6 +43,9 @@
     # page_cycler tests are deprecated. Replace them with loading.desktop.
     ('loading.desktop', ('--pageset-repeat=1','--story-tag-filter=typical')),
     ('loading.desktop', ('--pageset-repeat=1','--story-tag-filter=intl_ja_zh')),
+    ('rendering.desktop',
+      ('--story-tag-filter=tough_canvas',
+       '--story-filter="bouncing\\*\\|canvas\\*\\|microsoft\\*"')),
     ('octane',),
     ('kraken',),
     ('speedometer',),
diff --git a/site_utils/attribute_whitelist.txt b/site_utils/attribute_whitelist.txt
index d487234..efbc624 100644
--- a/site_utils/attribute_whitelist.txt
+++ b/site_utils/attribute_whitelist.txt
@@ -142,6 +142,7 @@
 suite:kernel_usb
 suite:kernel_usb_set1
 suite:kernel_weekly_regression
+suite:kiosk_longevity
 suite:link_perf
 suite:longevity
 suite:longevity_two
@@ -212,7 +213,6 @@
 suite:wifi_interop_wpa2
 suite:wifi_lucidsleep
 suite:wifi_matfunc
-suite:wifi_matfunc_noservo
 suite:wifi_perf
 suite:wifi_release
 suite:wifi_stress
diff --git a/site_utils/chromeos_proxy/swarming_bots.py b/site_utils/chromeos_proxy/swarming_bots.py
index 42c2281..bdd0ae2 100755
--- a/site_utils/chromeos_proxy/swarming_bots.py
+++ b/site_utils/chromeos_proxy/swarming_bots.py
@@ -213,6 +213,7 @@
         os.makedirs(self.bot_dir)
         dest = os.path.join(self.bot_dir, self.BOT_FILENAME)
         new_env = dict(os.environ)
+        new_env['SWARMING_EXTERNAL_BOT_SETUP'] = '1'
         logging.debug('[Bot %s] Getting bot code from: %s/bot_code',
                       self.bot_id, self.swarming_proxy)
         if self.specify_bot_id:
diff --git a/site_utils/deployment/COMMIT-QUEUE.ini b/site_utils/deployment/COMMIT-QUEUE.ini
new file mode 100644
index 0000000..7fc87a7
--- /dev/null
+++ b/site_utils/deployment/COMMIT-QUEUE.ini
@@ -0,0 +1,11 @@
+# Copyright 2019 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Per-project Commit Queue settings.
+# Documentation: http://goo.gl/5J7oND
+
+[GENERAL]
+
+pre-cq-configs: moblab-generic-vm-pre-cq
+submit-in-pre-cq: yes
diff --git a/site_utils/deployment/install.py b/site_utils/deployment/install.py
index 888fd8c..cece839 100644
--- a/site_utils/deployment/install.py
+++ b/site_utils/deployment/install.py
@@ -69,6 +69,7 @@
 from autotest_lib.client.common_lib import time_utils
 from autotest_lib.client.common_lib import utils
 from autotest_lib.client.common_lib.cros import retry
+from autotest_lib.server import afe_utils
 from autotest_lib.server import constants
 from autotest_lib.server import frontend
 from autotest_lib.server import hosts
@@ -477,19 +478,23 @@
 
 
 def _create_host_for_installation(host, arguments):
-  """Creates a hosts.CrosHost object to be used for installation.
+    """Creates a context manager of hosts.CrosHost object for installation.
 
-  The returned host object is agnostic of the infrastructure environment. In
-  particular, it does not have any references to the AFE.
+    The host object yielded by the returned context manager is agnostic of the
+    infrastructure environment. In particular, it does not have any references
+    to the AFE.
 
-  @param host: A server.hosts.CrosHost object.
-  @param arguments: Parsed commandline arguments for this script.
-  """
-  info = host.host_info_store.get()
-  s_host, s_port, s_serial = _extract_servo_attributes(host.hostname,
-                                                       info.attributes)
-  return preparedut.create_host(host.hostname, arguments.board, arguments.model,
-                                s_host, s_port, s_serial)
+    @param host: A server.hosts.CrosHost object.
+    @param arguments: Parsed commandline arguments for this script.
+
+    @return a context manager which yields hosts.CrosHost object.
+    """
+    info = host.host_info_store.get()
+    s_host, s_port, s_serial = _extract_servo_attributes(host.hostname,
+                                                         info.attributes)
+    return preparedut.create_host(host.hostname, arguments.board,
+                                  arguments.model, s_host, s_port, s_serial,
+                                  arguments.logdir)
 
 
 def _install_test_image(host, arguments):
@@ -501,14 +506,13 @@
     @param host       Host instance for the DUT being installed.
     @param arguments  Command line arguments with options.
     """
+    repair_image = _get_cros_repair_image_name(host)
+    logging.info('Using repair image %s', repair_image)
     if arguments.dry_run:
         return
     if arguments.stageusb:
         try:
-            preparedut.download_image_to_servo_usb(
-                    host,
-                    host.get_cros_repair_image_name(),
-            )
+            preparedut.download_image_to_servo_usb(host, repair_image)
         except Exception as e:
             logging.exception('Failed to stage image on USB: %s', e)
             raise Exception('USB staging failed')
@@ -516,7 +520,7 @@
         try:
             if arguments.using_servo:
                 logging.debug('Install FW using servo.')
-                preparedut.flash_firmware_using_servo(host)
+                preparedut.flash_firmware_using_servo(host, repair_image)
             else:
                 logging.debug('Install FW by chromeos-firmwareupdate.')
                 preparedut.install_firmware(host, arguments.force_firmware)
@@ -556,10 +560,8 @@
     host = None
     try:
         host = _create_host(hostname, afe, afe_host)
-        _install_test_image(
-            _create_host_for_installation(host, arguments),
-            arguments,
-        )
+        with _create_host_for_installation(host, arguments) as host_to_install:
+            _install_test_image(host_to_install, arguments)
 
         if arguments.install_test_image and not arguments.dry_run:
             host.labels.update_labels(host)
@@ -829,6 +831,18 @@
     return host_attributes
 
 
+def _get_cros_repair_image_name(host):
+    """Get the CrOS repair image name for given host.
+
+    @param host: hosts.CrosHost object. This object need not have an AFE
+                 reference.
+    """
+    info = host.host_info_store.get()
+    if not info.board:
+        raise InstallFailedError('Unknown board for given host')
+    return afe_utils.get_stable_cros_image_name(info.board)
+
+
 def install_duts(arguments):
     """Install a test image on DUTs, and deploy them.
 
diff --git a/site_utils/deployment/prepare/dut.py b/site_utils/deployment/prepare/dut.py
index 2e1a298..37dec6b 100644
--- a/site_utils/deployment/prepare/dut.py
+++ b/site_utils/deployment/prepare/dut.py
@@ -12,6 +12,7 @@
 from __future__ import division
 from __future__ import print_function
 
+import contextlib
 import time
 
 import common
@@ -26,9 +27,10 @@
 _FIRMWARE_UPDATE_TIMEOUT = 600
 
 
+@contextlib.contextmanager
 def create_host(hostname, board, model, servo_hostname, servo_port,
-                servo_serial=None):
-    """Create a server.hosts.CrosHost object to use for DUT preparation.
+                servo_serial=None, logs_dir=None):
+    """Yield a server.hosts.CrosHost object to use for DUT preparation.
 
     This object contains just enough inventory data to be able to prepare the
     DUT for lab deployment. It does not contain any reference to AFE / Skylab so
@@ -41,8 +43,10 @@
     @param servo_hostname:  FQDN of the servo host controlling the DUT.
     @param servo_port:      Servo host port used for the controlling servo.
     @param servo_serial:    (Optional) Serial number of the controlling servo.
+    @param logs_dir:        (Optional) Directory to save logs obtained from the
+                            host.
 
-    @return a server.hosts.Host object.
+    @yield a server.hosts.Host object.
     """
     labels = [
             'board:%s' % board,
@@ -65,11 +69,15 @@
             'afe_host': server_utils.EmptyAFEHost(),
     }
     host = hosts.create_host(machine_dict)
-    servo = servo_host.ServoHost(
+    servohost = servo_host.ServoHost(
             **servo_host.get_servo_args_for_host(host))
-    _prepare_servo(servo)
-    host.set_servo_host(servo)
-    return host
+    _prepare_servo(servohost)
+    host.set_servo_host(servohost)
+    host.servo.uart_logs_dir = logs_dir
+    try:
+        yield host
+    finally:
+        host.close()
 
 
 def download_image_to_servo_usb(host, build):
@@ -81,14 +89,75 @@
     host.servo.image_to_servo_usb(host.stage_image_for_servo(build))
 
 
-def flash_firmware_using_servo(host):
+def install_test_image(host):
+    """Install the test image for the given build to DUT.
+
+    This function assumes that the required image is already downloaded onto the
+    USB key connected to the DUT via servo.
+
+    @param host   servers.host.Host object.
+    """
+    host.servo_install()
+
+
+def flash_firmware_using_servo(host, build):
     """Flash DUT firmware directly using servo.
 
     Rather than running `chromeos-firmwareupdate` on DUT, we can flash DUT
     firmware directly using servo (run command `flashrom`, etc. on servo). In
     this way, we don't require DUT to be in dev mode and with dev_boot_usb
     enabled."""
-    host.firmware_install(build=host.get_cros_repair_image_name())
+    host.firmware_install(build)
+
+
+def install_firmware(host, force):
+    """Install dev-signed firmware after removing write-protect.
+
+    At start, it's assumed that hardware write-protect is disabled,
+    the DUT is in dev mode, and the servo's USB stick already has a
+    test image installed.
+
+    The firmware is installed by powering on and typing ctrl+U on
+    the keyboard in order to boot the test image from USB.  Once
+    the DUT is booted, we run a series of commands to install the
+    read-only firmware from the test image.  Then we clear debug
+    mode, and shut down.
+
+    @param host   Host instance to use for servo and ssh operations.
+    @param force  Boolean value determining if firmware install is forced.
+    """
+    servo = host.servo
+    # First power on.  We sleep to allow the firmware plenty of time
+    # to display the dev-mode screen; some boards take their time to
+    # be ready for the ctrl+U after power on.
+    servo.get_power_state_controller().power_off()
+    servo.switch_usbkey('dut')
+    servo.get_power_state_controller().power_on()
+    time.sleep(10)
+    # Dev mode screen should be up now:  type ctrl+U and wait for
+    # boot from USB to finish.
+    servo.ctrl_u()
+    if not host.wait_up(timeout=host.USB_BOOT_TIMEOUT):
+        raise Exception('DUT failed to boot in dev mode for '
+                        'firmware update')
+    # Disable software-controlled write-protect for both FPROMs, and
+    # install the RO firmware.
+    for fprom in ['host', 'ec']:
+        host.run('flashrom -p %s --wp-disable' % fprom,
+                 ignore_status=True)
+
+    fw_update_log = '/mnt/stateful_partition/home/root/cros-fw-update.log'
+    pid = _start_firmware_update(host, force, fw_update_log)
+    _wait_firmware_update_process(host, pid)
+    _check_firmware_update_result(host, fw_update_log)
+
+    # Get us out of dev-mode and clear GBB flags.  GBB flags are
+    # non-zero because boot from USB was enabled.
+    host.run('/usr/share/vboot/bin/set_gbb_flags.sh 0',
+             ignore_status=True)
+    host.run('crossystem disable_dev_request=1',
+             ignore_status=True)
+    host.halt()
 
 
 def _start_firmware_update(host, force, result_file):
@@ -151,71 +220,10 @@
         raise Exception("chromeos-firmwareupdate failed!")
 
 
-def install_firmware(host, force):
-    """Install dev-signed firmware after removing write-protect.
-
-    At start, it's assumed that hardware write-protect is disabled,
-    the DUT is in dev mode, and the servo's USB stick already has a
-    test image installed.
-
-    The firmware is installed by powering on and typing ctrl+U on
-    the keyboard in order to boot the test image from USB.  Once
-    the DUT is booted, we run a series of commands to install the
-    read-only firmware from the test image.  Then we clear debug
-    mode, and shut down.
-
-    @param host   Host instance to use for servo and ssh operations.
-    @param force  Boolean value determining if firmware install is forced.
-    """
-    servo = host.servo
-    # First power on.  We sleep to allow the firmware plenty of time
-    # to display the dev-mode screen; some boards take their time to
-    # be ready for the ctrl+U after power on.
-    servo.get_power_state_controller().power_off()
-    servo.switch_usbkey('dut')
-    servo.get_power_state_controller().power_on()
-    time.sleep(10)
-    # Dev mode screen should be up now:  type ctrl+U and wait for
-    # boot from USB to finish.
-    servo.ctrl_u()
-    if not host.wait_up(timeout=host.USB_BOOT_TIMEOUT):
-        raise Exception('DUT failed to boot in dev mode for '
-                        'firmware update')
-    # Disable software-controlled write-protect for both FPROMs, and
-    # install the RO firmware.
-    for fprom in ['host', 'ec']:
-        host.run('flashrom -p %s --wp-disable' % fprom,
-                 ignore_status=True)
-
-    fw_update_log = '/mnt/stateful_partition/home/root/cros-fw-update.log'
-    pid = _start_firmware_update(host, force, fw_update_log)
-    _wait_firmware_update_process(host, pid)
-    _check_firmware_update_result(host, fw_update_log)
-
-    # Get us out of dev-mode and clear GBB flags.  GBB flags are
-    # non-zero because boot from USB was enabled.
-    host.run('/usr/share/vboot/bin/set_gbb_flags.sh 0',
-             ignore_status=True)
-    host.run('crossystem disable_dev_request=1',
-             ignore_status=True)
-    host.halt()
-
-
-def install_test_image(host):
-    """Install the test image for the given build to DUT.
-
-    This function assumes that the required image is already downloaded onto the
-    USB key connected to the DUT via servo.
-
-    @param host   servers.host.Host object.
-    """
-    host.servo_install()
-
-
-def _prepare_servo(servo):
+def _prepare_servo(servohost):
     """Prepare servo connected to host for installation steps.
 
-    @param servo  A server.hosts.ServoHost object.
+    @param servohost  A server.hosts.servo_host.ServoHost object.
     """
     # Stopping `servod` on the servo host will force `repair()` to
     # restart it.  We want that restart for a few reasons:
@@ -225,12 +233,12 @@
     #   + If there's a problem with servod that verify and repair
     #     can't find, this provides a UI through which `servod` can
     #     be restarted.
-    servo.run('stop servod PORT=%d' % servo.servo_port,
-              ignore_status=True)
-    servo.repair()
+    servohost.run('stop servod PORT=%d' % servohost.servo_port,
+                  ignore_status=True)
+    servohost.repair()
 
     # Don't timeout probing for the host usb device, there could be a bunch
     # of servos probing at the same time on the same servo host.  And
     # since we can't pass None through the xml rpcs, use 0 to indicate None.
-    if not servo.get_servo().probe_host_usb_dev(timeout=0):
+    if not servohost.get_servo().probe_host_usb_dev(timeout=0):
         raise Exception('No USB stick detected on Servo host')
diff --git a/site_utils/deployment/prepare/main.py b/site_utils/deployment/prepare/main.py
old mode 100644
new mode 100755
index 2c2a642..9a40a39
--- a/site_utils/deployment/prepare/main.py
+++ b/site_utils/deployment/prepare/main.py
@@ -1,43 +1,53 @@
-#!/usr/bin/env python
+#!/usr/bin/python -u
 # Copyright 2019 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-"""Tool to (re)prepare a DUT for lab deployment.
-
-TODO(this docstring is a stub).
-"""
+"""Tool to (re)prepare a DUT for lab deployment."""
 
 from __future__ import absolute_import
 from __future__ import division
 from __future__ import print_function
 
 import argparse
+import errno
 import logging
+import logging.config
+import os
 
-import  common
+import common
+from autotest_lib.server import afe_utils
+from autotest_lib.server.hosts import file_store
 from autotest_lib.site_utils.deployment.prepare import dut as preparedut
 
+
+class DutPreparationError(Exception):
+  """Generic error raised during DUT preparation."""
+
+
 def main():
-  """Tool to (re)prepare a DUT for lab deployment.
-
-  TODO(this docstring is a stub).
-  """
+  """Tool to (re)prepare a DUT for lab deployment."""
   opts = _parse_args()
-  # Setup tempfile to capture trash dropped by autotest?
-  # Setup logging to avoid dumping everything to stdout?
-  logging.basicConfig(level=logging.DEBUG)
+  _configure_logging('prepare_dut', os.path.join(opts.results_dir, _LOG_FILE))
 
-  host = preparedut.create_host(
-      opts.hostname, opts.board, opts.model, opts.servo_hostname,
-      opts.servo_port, opts.servo_serial)
+  info = _read_store(opts.host_info_file)
+  repair_image = _get_cros_repair_image_name(info.board)
+  logging.info('Using repair image %s, obtained from AFE', repair_image)
+  with _create_host(opts.hostname, info, opts.results_dir) as host:
+    if opts.dry_run:
+      logging.info('DRY RUN: Would have run actions %s', opts.actions)
+      return
 
-  if 'stage-usb' in opts.actions:
-    preparedut.download_image_to_servo_usb(host, opts.build)
-  if 'install-firmware' in opts.actions:
-    preparedut.install_firmware(host, opts.force_firmware)
-  if 'install-test-image' in opts.actions:
-    preparedut.install_test_image(host)
+    if 'stage-usb' in opts.actions:
+      preparedut.download_image_to_servo_usb(host, repair_image)
+    if 'install-firmware' in opts.actions:
+      preparedut.install_firmware(host, opts.force_firmware)
+    if 'install-test-image' in opts.actions:
+      preparedut.install_test_image(host)
+
+
+_LOG_FILE = 'prepare_dut.log'
+_DUT_LOGS_DIR = 'dut_logs'
 
 
 def _parse_args():
@@ -51,42 +61,30 @@
       help='DUT preparation actions to execute.',
   )
   parser.add_argument(
+      '--dry-run',
+      action='store_true',
+      default=False,
+      help='Run in dry-run mode. No changes will be made to the DUT.',
+  )
+  parser.add_argument(
+      '--results-dir',
+      required=True,
+      help='Directory to drop logs and output artifacts in.',
+  )
+
+  parser.add_argument(
       '--hostname',
       required=True,
       help='Hostname of the DUT to prepare.',
   )
-
   parser.add_argument(
-      '--board',
+      '--host-info-file',
       required=True,
-      help='Board label of the DUT to prepare.',
-  )
-  parser.add_argument(
-      '--model',
-      required=True,
-      help='Model label of the DUT to prepare.',
-  )
-  parser.add_argument(
-      '--build',
-      required=True,
-      help='Chrome OS image version to use for installation.',
+      help=('Full path to HostInfo file.'
+            ' DUT inventory information is read from the HostInfo file.'),
   )
 
   parser.add_argument(
-      '--servo-hostname',
-      required=True,
-      help='Hostname of the servo host connected to the DUT.',
-  )
-  parser.add_argument(
-      '--servo-port',
-      required=True,
-      help='Servo host port (to be) used for the controlling servo.',
-  )
-  parser.add_argument(
-      '--servo-serial',
-      help='Serial number of the controlling servo.',
-  )
-  parser.add_argument(
       '--force-firmware',
       action='store_true',
       help='Force firmware isntallation via chromeos-installfirmware.',
@@ -95,5 +93,100 @@
   return parser.parse_args()
 
 
+def _configure_logging(name, tee_file):
+    """Configure logging globally.
+
+    @param name: Name to prepend to log messages.
+                 This should be the name of the program.
+    @param tee_file: File to tee logs to, in addition to stderr.
+    """
+    logging.config.dictConfig({
+        'version': 1,
+        'formatters': {
+            'stderr': {
+                'format': ('{name}: '
+                           '%(asctime)s:%(levelname)s'
+                           ':%(module)s:%(funcName)s:%(lineno)d'
+                           ': %(message)s'
+                           .format(name=name)),
+            },
+            'tee_file': {
+                'format': ('%(asctime)s:%(levelname)s'
+                           ':%(module)s:%(funcName)s:%(lineno)d'
+                           ': %(message)s'),
+            },
+        },
+        'handlers': {
+            'stderr': {
+                'class': 'logging.StreamHandler',
+                'formatter': 'stderr',
+            },
+            'tee_file': {
+                'class': 'logging.FileHandler',
+                'formatter': 'tee_file',
+                'filename': tee_file,
+            },
+        },
+        'root': {
+            'level': 'DEBUG',
+            'handlers': ['stderr', 'tee_file'],
+        },
+        'disable_existing_loggers': False,
+    })
+
+
+def _read_store(path):
+  """Read a HostInfo from a file at path."""
+  store = file_store.FileStore(path)
+  return store.get()
+
+
+def _create_host(hostname, info, results_dir):
+  """Context manager yielding a hosts.CrosHost with the given inventory info.
+
+  @param hostname: Hostname of the DUT.
+  @param info: A HostInfo with the inventory information to use.
+  @param results_dir: Path to directory for logs / output artifacts.
+  @return a context manager which yields a server.hosts.CrosHost object.
+  """
+  if not info.board:
+    raise DutPreparationError('No board in DUT labels')
+  if not info.model:
+    raise DutPreparationError('No model in DUT labels')
+
+  servo_args = {}
+  if 'servo_host' not in info.attributes:
+    raise DutPreparationError('No servo_host in DUT attributes')
+  if 'servo_port' not in info.attributes:
+    raise DutPreparationError('No servo_port in DUT attributes')
+
+  dut_logs_dir = os.path.join(results_dir, _DUT_LOGS_DIR)
+  try:
+    os.makedirs(dut_logs_dir)
+  except OSError as e:
+    if e.errno != errno.EEXIST:
+      raise
+
+  return preparedut.create_host(
+      hostname,
+      info.board,
+      info.model,
+      info.attributes['servo_host'],
+      info.attributes['servo_port'],
+      info.attributes.get('servo_serial', ''),
+      dut_logs_dir,
+  )
+
+
+def _get_cros_repair_image_name(board):
+  """Get the CrOS repair image name for the given board.
+
+  TODO(pprabhu): This is an evil function with dependence on the environment
+  (global_config information) and the AFE. Remove this dependence when stable
+  image mappings move off of the AFE.
+  """
+  return afe_utils.get_stable_cros_image_name(board)
+
+
 if __name__ == '__main__':
   main()
diff --git a/site_utils/run_suite.py b/site_utils/run_suite.py
index ac8a4df..35bbe5b 100755
--- a/site_utils/run_suite.py
+++ b/site_utils/run_suite.py
@@ -54,9 +54,19 @@
 
 import common
 from chromite.lib import buildbot_annotations as annotations
+from chromite.lib import gs
+from chromite.lib import osutils
 
 from django.core import exceptions as django_exceptions
 
+try:
+    from suite_scheduler import config_reader
+    from suite_scheduler import skylab
+except ImportError:
+    # For unittest
+    config_reader = None
+    skylab = None
+
 from autotest_lib.client.common_lib import control_data
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.common_lib import global_config
@@ -90,6 +100,13 @@
 _DEFAULT_AUTOTEST_INSTANCE = CONFIG.get_config_value(
         'SERVER', 'hostname', type=str)
 _URL_PATTERN = CONFIG.get_config_value('CROS', 'log_url_pattern', type=str)
+_ENABLE_RUN_SUITE_TRAMPOLINE = CONFIG.get_config_value(
+        'CROS', 'enable_run_suite_trampoline', type=bool, default=False)
+
+_MIGRATION_CONFIG_FILE = 'migration_config.ini'
+_MIGRATION_CONFIG_BUCKET = 'suite-scheduler.google.com.a.appspot.com'
+_TRAMPOLINE_CONFIG = 'gs://%s/%s' % (_MIGRATION_CONFIG_BUCKET,
+                                     _MIGRATION_CONFIG_FILE)
 
 # Minimum RPC timeout setting for calls expected to take long time, e.g.,
 # create_suite_job. If default socket time (socket.getdefaulttimeout()) is
@@ -2037,6 +2054,61 @@
         sys.exit(run_suite_common.RETURN_CODES.INFRA_FAILURE)
 
 
+def _check_if_use_skylab(options):
+    """Detect whether to run suite in skylab."""
+    if not _ENABLE_RUN_SUITE_TRAMPOLINE:
+        logging.info('trampoline to skylab is not enabled.')
+        return False
+
+    task_info = 'suite:%s, board:%s, model:%s, pool:%s' % (
+            options.name, options.board, options.model, options.pool)
+    ctx = gs.GSContext()
+    with osutils.TempDir(prefix='trampoline_') as tempdir:
+        temp_file = os.path.join(tempdir, _MIGRATION_CONFIG_FILE)
+        ctx.Copy(_TRAMPOLINE_CONFIG, temp_file)
+        _migration_config = config_reader.MigrationConfig(
+                config_reader.ConfigReader(temp_file))
+
+        logging.info('Checking whether to run in skylab: Task(%s)', task_info)
+        if skylab.should_run_in_skylab(_migration_config,
+                                       options.board,
+                                       options.model,
+                                       options.name,
+                                       options.pool):
+            logging.info('Task (%s) Should run in skylab', task_info)
+            return True
+
+    logging.info('Task (%s) Should run in autotest', task_info)
+    return False
+
+
+def _run_with_skylab(options):
+    """Run suite inside skylab."""
+    # TODO(xixuan): Implement running suite in skylab.
+    return _RETURN_RESULTS['ok']
+
+
+def _run_with_autotest(options):
+    """Run suite inside autotest."""
+    if options.pre_check and not _should_run(options):
+        logging.info('Suite %s-%s is terminated: Lab is closed, OR build is '
+                     'blocked, OR this suite has already been kicked off '
+                     'once in past %d days.',
+                     options.test_source_build, options.name,
+                     _SEARCH_JOB_MAX_DAYS)
+        result = run_suite_common.SuiteResult(
+            run_suite_common.RETURN_CODES.ERROR,
+            {'return_message': ("Lab is closed OR other reason"
+                                " (see code, it's complicated)")})
+    else:
+        result = _run_task(options)
+
+    if options.json_dump:
+        run_suite_common.dump_json(result.output_dict)
+
+    return result
+
+
 def main():
     """Entry point."""
     utils.verify_not_root_user()
@@ -2057,21 +2129,11 @@
         parser.print_help()
         result = run_suite_common.SuiteResult(
                 run_suite_common.RETURN_CODES.INVALID_OPTIONS)
-    elif options.pre_check and not _should_run(options):
-        logging.info('Suite %s-%s is terminated: Lab is closed, OR build is '
-                     'blocked, OR this suite has already been kicked off '
-                     'once in past %d days.',
-                     options.test_source_build, options.name,
-                     _SEARCH_JOB_MAX_DAYS)
-        result = run_suite_common.SuiteResult(
-            run_suite_common.RETURN_CODES.ERROR,
-            {'return_message': ("Lab is closed OR other reason"
-                                " (see code, it's complicated)")})
     else:
-        result = _run_task(options)
-
-    if options.json_dump:
-        run_suite_common.dump_json(result.output_dict)
+        if _check_if_use_skylab(options):
+            result = _run_with_skylab(options)
+        else:
+            result = _run_with_autotest(options)
 
     logging.info('Will return from run_suite with status: %s',
                   run_suite_common.RETURN_CODES.get_string(result.return_code))
diff --git a/site_utils/test_runner_utils.py b/site_utils/test_runner_utils.py
index ab0d3a2..647d59b 100755
--- a/site_utils/test_runner_utils.py
+++ b/site_utils/test_runner_utils.py
@@ -690,7 +690,7 @@
         sys.exit(1)
 
 
-def create_results_directory(results_directory=None):
+def create_results_directory(results_directory=None, board_name=None):
     """Create a results directory.
 
     If no directory is specified this method will create and return a
@@ -704,7 +704,10 @@
     """
     if results_directory is None:
         # Create a results_directory as subdir of /tmp
-        results_directory = tempfile.mkdtemp(prefix='test_that_results_')
+        dirname_prefix='test_that_results_'
+        if board_name is not None:
+            dirname_prefix += (board_name + '_')
+        results_directory = tempfile.mkdtemp(prefix=dirname_prefix)
     else:
         # Delete results_directory if it already exists.
         try:
diff --git a/site_utils/test_that.py b/site_utils/test_that.py
index 99c4ad4..d5a9dbd 100755
--- a/site_utils/test_that.py
+++ b/site_utils/test_that.py
@@ -245,7 +245,7 @@
         return 1
 
     results_directory = test_runner_utils.create_results_directory(
-            arguments.results_dir)
+            arguments.results_dir, arguments.board)
     test_runner_utils.add_ssh_identity(results_directory,
                                        arguments.ssh_private_key)
     arguments.results_dir = results_directory
diff --git a/test_suites/control.faft_cr50_prepvt b/test_suites/control.faft_cr50_prepvt
index 87b42d4..ec15b3a 100644
--- a/test_suites/control.faft_cr50_prepvt
+++ b/test_suites/control.faft_cr50_prepvt
@@ -35,7 +35,6 @@
 args_dict['add_experimental'] = True
 args_dict['job'] = job
 args_dict['test_args'] = {
-	'ccd_lockout' : 'False',
 	'ccd_open_restricted' : 'False',
 }
 
diff --git a/test_suites/control.faft_cr50_pvt b/test_suites/control.faft_cr50_pvt
index 03556d9..9ad8ed9 100644
--- a/test_suites/control.faft_cr50_pvt
+++ b/test_suites/control.faft_cr50_pvt
@@ -33,7 +33,6 @@
 args_dict['add_experimental'] = True
 args_dict['job'] = job
 args_dict['test_args'] = {
-	'ccd_lockout' : 'False',
-	'cr50_qual_version' : '0.3.11/0:0:0'
+	'cr50_qual_version' : '0.3.15/FFFF:0:10000'
 }
 dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.wifi_matfunc_noservo b/test_suites/control.wifi_matfunc_noservo
deleted file mode 100644
index d44ca4e..0000000
--- a/test_suites/control.wifi_matfunc_noservo
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "kirtika@chromium.org"
-NAME = "wifi_matfunc_noservo"
-PURPOSE = "Clone of wifi_matfunc, excluding the tests that need servo - Verify basic WiFi functionality."
-
-TIME = "SHORT"
-TEST_CATEGORY = "General"
-TEST_CLASS = "suite"
-TEST_TYPE = "Server"
-
-DOC = """
-This is an example of a dynamic test suite.
-
-@param build: The name of the image to test.
-          Ex: x86-mario-release/R17-1412.33.0-a1-b29
-@param board: The board to test on. Ex: x86-mario
-@param pool: The pool of machines to utilize for scheduling. If pool=None
-             board is used.
-@param check_hosts: require appropriate live hosts to exist in the lab.
-@param SKIP_IMAGE: (optional) If present and True, don't re-image devices.
-@param file_bugs: If True your suite will file bugs on failures.
-@param max_run_time: Amount of time each test shoud run in minutes.
-"""
-
-import common
-from autotest_lib.server.cros.dynamic_suite import dynamic_suite
-
-args_dict['add_experimental'] = True
-args_dict['max_runtime_mins'] = 60
-args_dict['name'] = NAME
-args_dict['job'] = job
-
-dynamic_suite.reimage_and_run(**args_dict)
diff --git a/tko/parse.py b/tko/parse.py
index 28db6d2..23a2cee 100755
--- a/tko/parse.py
+++ b/tko/parse.py
@@ -397,7 +397,7 @@
         tko_utils.dprint("* testname, subdir, status, reason: %s %s %s %s"
                          % (test.testname, test.subdir, test.status,
                             test.reason))
-        if test.status != 'GOOD':
+        if test.status not in ('GOOD', 'WARN'):
             job_successful = False
             pid_file_manager.num_tests_failed += 1
             message_lines.append(format_failure_message(
diff --git a/tko/perf_upload/perf_dashboard_config.json b/tko/perf_upload/perf_dashboard_config.json
index 162f510..12c443b 100644
--- a/tko/perf_upload/perf_dashboard_config.json
+++ b/tko/perf_upload/perf_dashboard_config.json
@@ -674,6 +674,11 @@
     "dashboard_test_name": "speedometer2"
   },
   {
+    "autotest_name": "telemetry_Benchmarks.system_health.memory_desktop",
+    "master_name": "ChromeOSPerf",
+    "dashboard_test_name": "system_health.memory_desktop"
+  },
+  {
     "autotest_name": "telemetry_Benchmarks.tab_switching.typical_25",
     "master_name": "ChromeOSPerf",
     "dashboard_test_name": "tab_switching.typical_25"
diff --git a/utils/external_packages.py b/utils/external_packages.py
index e7fa53e..ef99787 100644
--- a/utils/external_packages.py
+++ b/utils/external_packages.py
@@ -1224,6 +1224,33 @@
         return False
 
 
+class SuiteSchedulerRepo(_ExternalGitRepo):
+    """Clones or updates the suite_scheduler repo."""
+
+    _GIT_URL = ('https://chromium.googlesource.com/chromiumos/'
+                'infra/suite_scheduler')
+
+    def build_and_install(self, install_dir):
+        """
+        Clone if the repo isn't initialized, pull clean bits if it is.
+
+        @param install_dir: destination directory for suite_scheduler
+                            installation.
+        Note: this method always installs the master branch
+              (self.MASTER_BRANCH); prod-branch install is not supported.
+        """
+        local_dir = os.path.join(install_dir, 'suite_scheduler')
+        git_repo = revision_control.GitRepo(
+                local_dir,
+                self._GIT_URL,
+                abs_work_tree=local_dir)
+        git_repo.reinit_repo_at(self.MASTER_BRANCH)
+
+        if git_repo.get_latest_commit_hash():
+            return True
+        return False
+
+
 class BtsocketRepo(_ExternalGitRepo):
     """Clones or updates the btsocket repo."""
 
diff --git a/venv/requirements.txt b/venv/requirements.txt
index 0555833..6921ccb 100644
--- a/venv/requirements.txt
+++ b/venv/requirements.txt
@@ -5,6 +5,7 @@
 fabric==1.12.0
 funcsigs==1.0.2
 mock==2.0.0
+mysql-connector-python==8.0.6
 oauth2client==3.0.0
 paramiko==1.17.2
 pbr==1.10.0
@@ -24,6 +25,5 @@
 
 #Extra requirements for chromiumos/infra/skylab_inventory
 #Do NOT need the mysql versions for autotest
-#mysql-connector-python==8.0.6
 #MySQL-python==1.2.5
 protobuf==3.0.0
diff --git a/venv/skylab_suite/cmd/abort_suite_skylab.py b/venv/skylab_suite/cmd/abort_suite_skylab.py
index 483cdda..018c8fd 100644
--- a/venv/skylab_suite/cmd/abort_suite_skylab.py
+++ b/venv/skylab_suite/cmd/abort_suite_skylab.py
@@ -26,36 +26,36 @@
 from skylab_suite import swarming_lib
 
 
-def _abort_suite_tasks(suite_tasks):
+def _abort_suite_tasks(client, suite_tasks):
     aborted_suite_num = 0
     for pt in suite_tasks:
         logging.info('Aborting suite task %s', pt['task_id'])
-        swarming_lib.abort_task(pt['task_id'])
+        client.abort_task(pt['task_id'])
         if 'children_task_ids' not in pt:
             logging.info('No child tasks for task %s', pt['task_id'])
             continue
 
         for ct in pt['children_task_ids']:
             logging.info('Aborting task %s', ct)
-            swarming_lib.abort_task(ct)
+            client.abort_task(ct)
 
 
-def _get_suite_tasks_by_suite_ids(suite_task_ids):
+def _get_suite_tasks_by_suite_ids(client, suite_task_ids):
     """Return a list of tasks with the given list of suite_task_ids."""
     suite_tasks = []
     for suite_task_id in suite_task_ids:
-        suite_tasks.append(swarming_lib.query_task_by_id(suite_task_id))
+        suite_tasks.append(client.query_task_by_id(suite_task_id))
 
     return suite_tasks
 
 
-def _get_suite_tasks_by_specs(suite_spec):
+def _get_suite_tasks_by_specs(client, suite_spec):
     """Return a list of tasks with given suite_spec."""
     tags = {'pool': swarming_lib.SKYLAB_SUITE_POOL,
             'board': suite_spec.board,
             'build': suite_spec.test_source_build,
             'suite': suite_spec.suite_name}
-    return swarming_lib.query_task_by_tags(tags)
+    return client.query_task_by_tags(tags)
 
 
 def _abort_suite(options):
@@ -64,14 +64,16 @@
     This method aborts the suite job and its children jobs, including
     'RUNNING' jobs.
     """
+    client = swarming_lib.Client(options.swarming_auth_json)
     suite_spec = suite_parser.parse_suite_spec(options)
     if options.suite_task_ids:
-        parent_tasks = _get_suite_tasks_by_suite_ids(options.suite_task_ids)
+        parent_tasks = _get_suite_tasks_by_suite_ids(client,
+                                                     options.suite_task_ids)
     else:
-        parent_tasks = _get_suite_tasks_by_specs(suite_spec)
+        parent_tasks = _get_suite_tasks_by_specs(client, suite_spec)
 
-    _abort_suite_tasks(parent_tasks[:min(options.abort_limit,
-                                         len(parent_tasks))])
+    _abort_suite_tasks(client, parent_tasks[:min(options.abort_limit,
+                                            len(parent_tasks))])
     logging.info('Suite %s/%s has been aborted.', suite_spec.test_source_build,
                  suite_spec.suite_name)
 
diff --git a/venv/skylab_suite/cmd/run_suite_skylab.py b/venv/skylab_suite/cmd/run_suite_skylab.py
index 3a2085b..4b2ef82 100755
--- a/venv/skylab_suite/cmd/run_suite_skylab.py
+++ b/venv/skylab_suite/cmd/run_suite_skylab.py
@@ -56,6 +56,7 @@
 
 
 def _run_suite(options):
+    swarming_client = swarming_lib.Client(options.swarming_auth_json)
     run_suite_common = autotest.load('site_utils.run_suite_common')
     logging.info('Kicked off suite %s', options.suite_name)
     suite_spec = suite_parser.parse_suite_spec(options)
@@ -70,9 +71,9 @@
                     run_suite_common.RETURN_CODES.OK)
 
     if options.suite_name == PROVISION_SUITE_NAME:
-        suite_job = cros_suite.ProvisionSuite(suite_spec)
+        suite_job = cros_suite.ProvisionSuite(suite_spec, swarming_client)
     else:
-        suite_job = cros_suite.Suite(suite_spec)
+        suite_job = cros_suite.Suite(suite_spec, swarming_client)
 
     try:
         suite_job.prepare()
@@ -82,8 +83,9 @@
                 run_suite_common.RETURN_CODES.INFRA_FAILURE)
 
     suite_handler_spec = _parse_suite_handler_spec(options)
-    suite_handler = cros_suite.SuiteHandler(suite_handler_spec)
-    suite_runner.run(suite_job.test_specs,
+    suite_handler = cros_suite.SuiteHandler(suite_handler_spec, swarming_client)
+    suite_runner.run(swarming_client,
+                     suite_job.test_specs,
                      suite_handler,
                      options.dry_run)
 
@@ -113,11 +115,17 @@
     return options
 
 
+def _setup_env(options):
+    """Set environment variables based on commandline options."""
+    os.environ['SWARMING_CREDS'] = options.swarming_auth_json
+
+
 def main():
     """Entry point."""
     autotest.monkeypatch()
 
     options = parse_args()
+    _setup_env(options)
     suite_tracking.setup_logging()
     result = _run_suite(options)
     logging.info('Will return from %s with status: %s',
diff --git a/venv/skylab_suite/cros_suite.py b/venv/skylab_suite/cros_suite.py
index d49a9f7..55e93ca 100644
--- a/venv/skylab_suite/cros_suite.py
+++ b/venv/skylab_suite/cros_suite.py
@@ -77,6 +77,9 @@
                 'pool',
                 'build',
                 'keyvals',
+                # TODO(akeshet): Determine why this is necessary
+                # (can't this just be specified as its own dimension?) and
+                # delete it if it isn't necessary.
                 'bot_id',
                 'dut_name',
                 'expiration_secs',
@@ -93,7 +96,7 @@
     Its responsibility includes handling retries for child tests.
     """
 
-    def __init__(self, specs):
+    def __init__(self, specs, client):
         self._suite_name = specs.suite_name
         self._wait = specs.wait
         self._timeout_mins = specs.timeout_mins
@@ -109,6 +112,7 @@
         self._task_id = os.environ.get('SWARMING_TASK_ID')
         self._task_to_test_maps = {}
         self.successfully_provisioned_duts = set()
+        self._client = client
 
         # It only maintains the swarming task of the final run of each
         # child task, i.e. it doesn't include failed swarming tasks of
@@ -195,7 +199,7 @@
         The final active child task list will include task x1_2 and x2_1, won't
         include x1_1 since it's a task which is finished but get retried later.
         """
-        all_tasks = swarming_lib.get_child_tasks(suite_id)
+        all_tasks = self._client.get_child_tasks(suite_id)
         return [t for t in all_tasks if t['task_id'] in self._task_to_test_maps]
 
     def handle_results(self, suite_id):
@@ -274,10 +278,11 @@
     """The class for a CrOS suite."""
     EXPIRATION_SECS = swarming_lib.DEFAULT_EXPIRATION_SECS
 
-    def __init__(self, spec):
+    def __init__(self, spec, client):
         """Initialize a suite.
 
         @param spec: A SuiteSpec object.
+        @param client: A swarming_lib.Client instance.
         """
         self._ds = None
 
@@ -295,6 +300,7 @@
         self.minimum_duts = spec.minimum_duts
         self.timeout_mins = spec.timeout_mins
         self.quota_account = spec.quota_account
+        self._client = client
 
     @property
     def ds(self):
@@ -414,7 +420,7 @@
         swarming_pool_deps = swarming_lib.task_dependencies_from_labels(
             ['pool:%s' % self.pool])
         dimensions.update(swarming_pool_deps)
-        bots = swarming_lib.query_bots_list(dimensions)
+        bots = self._client.query_bots_list(dimensions)
         return [bot for bot in bots if swarming_lib.bot_available(bot)]
 
 
@@ -422,8 +428,8 @@
     """The class for a CrOS provision suite."""
     EXPIRATION_SECS = swarming_lib.DEFAULT_EXPIRATION_SECS
 
-    def __init__(self, spec):
-        super(ProvisionSuite, self).__init__(spec)
+    def __init__(self, spec, client):
+        super(ProvisionSuite, self).__init__(spec, client)
         self._num_required = spec.suite_args['num_required']
 
     def _find_tests(self, available_bots_num=0):
diff --git a/venv/skylab_suite/suite_parser.py b/venv/skylab_suite/suite_parser.py
index be85032..49b57be 100644
--- a/venv/skylab_suite/suite_parser.py
+++ b/venv/skylab_suite/suite_parser.py
@@ -83,6 +83,10 @@
         help=("Quota account to be used for this suite's jobs, if applicable. "
               "Only relevant for jobs running in a quota scheduler pool "
               "(e.g. quota-metered)."))
+    parser.add_argument(
+        '--swarming_auth_json', default=swarming_lib.DEFAULT_SERVICE_ACCOUNT,
+        action='store', help="Path to swarming service account json creds. "
+        "Specify '' to omit. Otherwise, defaults to bot's default creds.")
 
     # TODO(ayatane): Make sure no callers pass --use_fallback before removing.
     parser.add_argument(
diff --git a/venv/skylab_suite/suite_runner.py b/venv/skylab_suite/suite_runner.py
index c61cc29..808ccee 100644
--- a/venv/skylab_suite/suite_runner.py
+++ b/venv/skylab_suite/suite_runner.py
@@ -13,6 +13,7 @@
 import json
 import logging
 import os
+import re
 import time
 
 from lucifer import autotest
@@ -20,11 +21,9 @@
 from skylab_suite import swarming_lib
 
 
-SKYLAB_SUITE_USER = 'skylab_suite_runner'
-SKYLAB_LUCI_TAG = 'luci_project:chromeos'
 SKYLAB_DRONE_SWARMING_WORKER = '/opt/infra-tools/skylab_swarming_worker'
-
-QUOTA_ACCOUNT_TAG_FORMAT = 'qs_account:%s'
+SKYLAB_SUITE_USER = 'skylab_suite_runner'
+SKYLAB_TOOL = '/opt/infra-tools/skylab'
 
 SUITE_WAIT_SLEEP_INTERVAL_SECONDS = 30
 
@@ -33,30 +32,33 @@
                                'modem_repair']
 
 
-def run(test_specs, suite_handler, dry_run=False):
+def run(client, test_specs, suite_handler, dry_run=False):
     """Run a CrOS dynamic test suite.
 
+    @param client: A swarming_lib.Client instance.
     @param test_specs: A list of cros_suite.TestSpec objects.
     @param suite_handler: A cros_suite.SuiteHandler object.
     @param dry_run: Whether to kick off dry runs of the tests.
     """
+    assert isinstance(client, swarming_lib.Client)
     if suite_handler.suite_id:
         # Resume an existing suite.
-        _resume_suite(test_specs, suite_handler, dry_run)
+        _resume_suite(client, test_specs, suite_handler, dry_run)
     else:
         # Make a new suite.
         _run_suite(test_specs, suite_handler, dry_run)
 
 
-def _resume_suite(test_specs, suite_handler, dry_run=False):
+def _resume_suite(client, test_specs, suite_handler, dry_run=False):
     """Resume a suite and its child tasks by given suite id."""
+    assert isinstance(client, swarming_lib.Client)
     suite_id = suite_handler.suite_id
-    all_tasks = swarming_lib.get_child_tasks(suite_id)
+    all_tasks = client.get_child_tasks(suite_id)
     not_yet_scheduled = _get_unscheduled_test_specs(
             test_specs, suite_handler, all_tasks)
 
     logging.info('Not yet scheduled test_specs: %r', not_yet_scheduled)
-    _schedule_test_specs(not_yet_scheduled, suite_handler, suite_id, dry_run)
+    _create_test_tasks(not_yet_scheduled, suite_handler, suite_id, dry_run)
 
     if suite_id is not None and suite_handler.should_wait():
         _wait_for_results(suite_handler, dry_run=dry_run)
@@ -115,15 +117,18 @@
 def _run_suite(test_specs, suite_handler, dry_run=False):
     """Make a new suite."""
     suite_id = os.environ.get('SWARMING_TASK_ID')
-    _schedule_test_specs(test_specs, suite_handler, suite_id, dry_run)
+    if not suite_id:
+        raise ValueError("Unable to determine suite's task id from env var "
+                         "SWARMING_TASK_ID.")
+    _create_test_tasks(test_specs, suite_handler, suite_id, dry_run)
     suite_handler.set_suite_id(suite_id)
 
-    if suite_id is not None and suite_handler.should_wait():
+    if suite_handler.should_wait():
         _wait_for_results(suite_handler, dry_run=dry_run)
 
 
-def _schedule_test_specs(test_specs, suite_handler, suite_id, dry_run=False):
-    """Schedule a list of tests (TestSpecs).
+def _create_test_tasks(test_specs, suite_handler, suite_id, dry_run=False):
+    """Create test tasks for a list of tests (TestSpecs).
 
     Given a list of TestSpec object, this function will schedule them on
     swarming one by one, and add them to the swarming_task_id-to-test map
@@ -137,7 +142,7 @@
     @param dry_run: Whether to kick off dry runs of the tests.
     """
     for test_spec in test_specs:
-        test_task_id = _schedule_test(
+        test_task_id = _create_test_task(
                 test_spec,
                 suite_id=suite_id,
                 is_provision=suite_handler.is_provision(),
@@ -150,146 +155,118 @@
                         previous_retried_ids=[]))
 
 
-def _get_suite_cmd(test_spec, suite_id):
-    """Return the commands for running a suite with or without provision.
-
-    @param test_spec: a cros_suite.TestSpec object.
-    @param suite_id: a string of parent suite's swarming task id.
-
-    @return a list of commands: [cmd, cmd_with_fallback], in which cmd is the
-        normal cmd to kick off a test, cmd_with_fallback is the cmd to
-        provision the DUT before, then kick off the test.
-    """
-    constants = autotest.load('server.cros.dynamic_suite.constants')
-    job_keyvals = test_spec.keyvals.copy()
-    job_keyvals[constants.JOB_EXPERIMENTAL_KEY] = test_spec.test.experimental
-    if suite_id is not None:
-        job_keyvals[constants.PARENT_JOB_ID] = suite_id
-
-    cmd = [SKYLAB_DRONE_SWARMING_WORKER]
-    if test_spec.test.test_type.lower() == 'client':
-      cmd += ['-client-test']
-
-    cmd += ['-keyvals', _convert_dict_to_string(job_keyvals)]
-    cmd += ['-task-name', test_spec.test.name]
-
-    return [cmd, cmd + ['-provision-labels',
-                        'cros-version:%s' % test_spec.build]]
-
-
-def _get_provision_expiration_secs(test_spec, is_provision):
-    """Set the provision expiration secs in fallback request.
-
-    TODO (xixuan): Find a better way to not hard-code expiration secs for
-    provision slice. Now hard-code it as 95% of the timeout for CQ, and 5% of
-    timeout for others, as CQ has a provision stage before.
-    """
-    if test_spec.pool in ['cq'] and not is_provision:
-      return int(0.95 * test_spec.expiration_secs)
-
-    return int(0.05 * test_spec.expiration_secs)
-
-
-def _run_swarming_cmd_with_fallback(cmds, dimensions, test_spec, suite_id,
-                                    is_provision):
-    """Kick off a fallback swarming cmd.
-
-    @param cmds: A list of commands: [cmd, cmd_with_fallback]. Each of the cmd
-        is a list.
-    @param dimensions: A dict of dimensions used to form the swarming cmd.
-    @param test_spec: a cros_suite.TestSpec object.
-    @param suite_id: The suite id of the test to kick off.
-    @param is_provision: Indicate whether this suite is a provision suite.
-    """
-    fallback_dimensions = dimensions.copy()
-    if test_spec.bot_id:
-        fallback_dimensions['id'] = test_spec.bot_id
-
-    normal_dimensions = fallback_dimensions.copy()
-    normal_dimensions['provisionable-cros-version'] = test_spec.build
-    all_dimensions = [normal_dimensions, fallback_dimensions]
-    tags = [SKYLAB_LUCI_TAG, 'build:%s' % test_spec.build]
-    if suite_id is not None:
-        tags += ['parent_task_id:%s' % suite_id]
-
-    if test_spec.quota_account is not None:
-        tags += [QUOTA_ACCOUNT_TAG_FORMAT % test_spec.quota_account]
-
-    provision_expiration_secs = _get_provision_expiration_secs(
-            test_spec, is_provision)
-    all_expiration_secs = [
-            provision_expiration_secs,
-            test_spec.expiration_secs - provision_expiration_secs]
-
-    # Add tags and command flags for LogDog.
-    logdog_url = swarming_lib.make_logdog_annotation_url()
-    if logdog_url:
-        tags += ['log_location:' + logdog_url]
-        for cmd in cmds:
-            cmd.extend(['-logdog-annotation-url', logdog_url])
-
-    # Use first slice to kick off normal cmd without '-provision-labels',
-    # since the assigned DUT is already provisioned by given build.
-    # Use second slice to kick off cmd_with_fallback to enable provision before
-    # running tests, as the assigned DUT hasn't been provisioned.
-    json_request = swarming_lib.make_fallback_request_dict(
-            cmds=cmds,
-            slices_dimensions=all_dimensions,
-            slices_expiration_secs=all_expiration_secs,
-            task_name=test_spec.test.name,
-            priority=test_spec.priority,
-            tags=tags,
-            user=SKYLAB_SUITE_USER,
-            parent_task_id=suite_id,
-            grace_period_secs=test_spec.grace_period_secs,
-            execution_timeout_secs=test_spec.execution_timeout_secs,
-            io_timeout_secs=test_spec.io_timeout_secs)
-
-    cros_build_lib = autotest.chromite_load('cros_build_lib')
-    result = cros_build_lib.RunCommand(swarming_lib.get_new_task_swarming_cmd(),
-                                       input=json.dumps(json_request),
-                                       env=os.environ.copy(),
-                                       capture_output=True)
-    logging.info('Input: %r', json_request)
-    return json.loads(result.output)['task_id']
-
-
-def _schedule_test(test_spec, suite_id=None,
-                   is_provision=False, dry_run=False):
-    """Schedule a CrOS test.
+def _create_test_task(test_spec, suite_id=None,
+                      is_provision=False, dry_run=False):
+    """Create a test task for a given test spec.
 
     @param test_spec: A cros_suite.TestSpec object.
     @param suite_id: the suite task id of the test.
-    @param dry_run: Whether to kick off a dry run of a swarming cmd.
+    @param dry_run: If True, log the command instead of creating the task.
+
+    @return the swarming task id of this task, or None when dry_run is set.
     """
-    logging.info('Scheduling test %s', test_spec.test.name)
-    cmd, cmd_with_fallback = _get_suite_cmd(test_spec, suite_id)
+    logging.info('Creating task for test %s', test_spec.test.name)
+    skylab_tool_path = os.environ.get('SKYLAB_TOOL', SKYLAB_TOOL)
+
+    cmd = [
+        skylab_tool_path, 'create-test',
+        '-board', test_spec.board,
+        '-image', test_spec.build,
+        '-service-account-json', os.environ['SWARMING_CREDS'],
+        ]
+    if _is_dev():
+        cmd += ['-dev']
+    if test_spec.pool:
+        # TODO(akeshet): Clean up this hack around pool name translation.
+        autotest_pool_label = 'pool:%s' % test_spec.pool
+        pool_dependency_value = swarming_lib.task_dependencies_from_labels(
+            [autotest_pool_label])['label-pool']
+        cmd += ['-pool', pool_dependency_value]
+
+    if test_spec.model:
+        cmd += ['-model', test_spec.model]
+    if test_spec.quota_account:
+        cmd += ['-qs-account', test_spec.quota_account]
+    if test_spec.test.test_type.lower() == 'client':
+        cmd += ['-client-test']
+
+    tags = _compute_tags(test_spec.build, suite_id)
+    dimensions = _compute_dimensions(
+            test_spec.bot_id, test_spec.test.dependencies)
+    keyvals_flat = _compute_job_keyvals_flat(test_spec.keyvals, suite_id)
+
+    for tag in tags:
+        cmd += ['-tag', tag]
+    for keyval in keyvals_flat:
+        cmd += ['-keyval', keyval]
+    cmd += [test_spec.test.name]
+    cmd += dimensions
+
     if dry_run:
-        cmd = ['/bin/echo'] + cmd
-        test_spec.test.name = 'Echo ' + test_spec.test.name
+        logging.info('Would have created task with command %s', cmd)
+        return
 
-    dimensions = {'pool': swarming_lib.SKYLAB_DRONE_POOL,
-                  'label-board': test_spec.board,
-                  'dut_state': swarming_lib.SWARMING_DUT_READY_STATUS}
-    if test_spec.model is not None:
-        dimensions['label-model'] = test_spec.model
+    # TODO(akeshet): Avoid this late chromite import.
+    cros_build_lib = autotest.chromite_load('cros_build_lib')
+    result = cros_build_lib.RunCommand(cmd, capture_output=True)
+    # TODO(akeshet): Use -json flag and json-parse output of the command instead
+    # of regex matching to determine task_id.
+    m = re.match('.*id=(.*)$', result.output)
+    task_id = m.group(1)
+    logging.info('Created task with id %s', task_id)
+    return task_id
 
+
+# TODO(akeshet): Eliminate the need for this, by either adding an explicit
+# swarming_server argument to skylab tool, or having the tool respect the
+# SWARMING_SERVER environment variable. See crbug.com/948774
+def _is_dev():
+    """Detect whether skylab tool should be invoked with -dev flag."""
+    return 'chromium-swarm-dev' in os.environ['SWARMING_SERVER']
+
+def _compute_tags(build, suite_id):
+    tags = [
+        'build:%s' % build,
+    ]
+    if suite_id is not None:
+        tags += ['parent_task_id:%s' % suite_id]
+    return tags
+
+
+def _compute_dimensions(bot_id, dependencies):
+    dimensions = []
+    if bot_id:
+        dimensions += ['id:%s' % bot_id]
+    deps = _filter_unsupported_dependencies(dependencies)
+    flattened_swarming_deps = sorted([
+        '%s:%s' % (k, v) for
+        k, v in swarming_lib.task_dependencies_from_labels(deps).items()
+        ])
+    dimensions += flattened_swarming_deps
+    return dimensions
+
+
+def _compute_job_keyvals_flat(keyvals, suite_id):
+    # Job keyvals calculation.
+    job_keyvals = keyvals.copy()
+    if suite_id is not None:
+        # TODO(akeshet): Avoid this late autotest constants import.
+        constants = autotest.load('server.cros.dynamic_suite.constants')
+        job_keyvals[constants.PARENT_JOB_ID] = suite_id
+    keyvals_flat = sorted(
+        ['%s:%s' % (k, v) for k, v in job_keyvals.items()])
+    return keyvals_flat
+
+
+def _filter_unsupported_dependencies(dependencies):
+    """Filter out Skylab-unsupported test dependencies, with a warning."""
     deps = []
-    for dep in test_spec.test.dependencies:
+    for dep in dependencies:
         if dep in _NOT_SUPPORTED_DEPENDENCIES:
             logging.warning('Dependency %s is not supported in skylab', dep)
-            continue
-
-        deps.append(dep)
-
-    deps.append('pool:%s' % test_spec.pool)
-    dimensions.update(swarming_lib.task_dependencies_from_labels(deps))
-
-    return _run_swarming_cmd_with_fallback(
-            [cmd, cmd_with_fallback], dimensions, test_spec,
-            suite_id, is_provision)
+        else:
+            deps.append(dep)
+    return deps
 
 
 @contextlib.contextmanager
@@ -354,7 +331,7 @@
     logging.info('Retrying test %s, remaining %d retries.',
                  last_retry_spec.test_spec.test.name,
                  last_retry_spec.remaining_retries - 1)
-    retried_task_id = _schedule_test(
+    retried_task_id = _create_test_task(
             last_retry_spec.test_spec,
             suite_id=suite_handler.suite_id,
             is_provision=suite_handler.is_provision(),
diff --git a/venv/skylab_suite/suite_tracking.py b/venv/skylab_suite/suite_tracking.py
index cbea477..709840d 100644
--- a/venv/skylab_suite/suite_tracking.py
+++ b/venv/skylab_suite/suite_tracking.py
@@ -11,9 +11,11 @@
 import contextlib
 import logging
 import logging.config
+import mysql.connector
 
 from lucifer import autotest
 from skylab_suite import swarming_lib
+from skylab_suite import tko_test_views
 
 # Test status in _IGNORED_TEST_STATE won't be reported as test failure.
 # Or test may be reported as failure as
@@ -44,7 +46,7 @@
                 anchor_test += '-' + hspec.test_spec.dut_name
 
             show_text = '[Test-logs]: %s' % anchor_test
-            _print_task_link_annotation(task_id, show_text)
+            _print_task_result_link_annotation(task_id, show_text)
 
 
 def log_suite_results(suite_name, suite_handler):
@@ -68,17 +70,57 @@
     logging.info('Links to tests:')
     logging.info('Suite Job %s %s', suite_name,
                  swarming_lib.get_task_link(suite_handler.suite_id))
-    _log_test_links(test_results)
+    _log_test_result_links(test_results)
 
     _log_buildbot_links(suite_handler, suite_name, test_results)
-
     return return_code
 
 
-def _print_task_link_annotation(task_id, text):
+def _get_failed_test_views_from_tko(task_ids):
+    """Get test views corresponding to failed tests from TKO.
+
+    @param task_ids: list of Swarming request IDs.
+    @return a dict {task_id: [tko_test_views.Row]} with only failed rows kept.
+    """
+    conn = _new_tko_connection()
+    if conn is None:
+        return {}
+
+    try:
+        views = tko_test_views.get(conn, task_ids)
+    except mysql.connector.Error:
+        logging.exception('Failed to obtain failure reasons from TKO')
+        return {}
+    return {k: tko_test_views.filter_failed(v) for k, v in views.iteritems()}
+
+
+def _new_tko_connection():
+    global_config = autotest.load('client.common_lib.global_config')
+    config = global_config.global_config
+    try:
+        host = config.get_config_value('AUTOTEST_WEB', 'global_db_host')
+        user = config.get_config_value('AUTOTEST_WEB', 'global_db_user')
+        password = config.get_config_value('AUTOTEST_WEB', 'global_db_password')
+        database = config.get_config_value('AUTOTEST_WEB', 'database')
+    except global_config.ConfigError:
+        logging.exception('Could not load TKO connection configuration')
+        return None
+    try:
+        if host.startswith('/'):
+            return mysql.connector.connect(unix_socket=host, user=user,
+                                           password=password, database=database)
+        else:
+            return mysql.connector.connect(host=host, user=user,
+                                           password=password, database=database)
+    except mysql.connector.Error:
+        logging.exception('Failed to connect to TKO database')
+        return None
+
+
+def _print_task_result_link_annotation(task_id, text):
     """Print the link of task logs.
 
-    Given text: '[Test-logs]: dummy_Pass-chromeos4-row7-rack6-host19'
+    Given text: 'dummy_Pass-chromeos4-row7-rack6-host19'
           task_id: '3ee300e77a576e10'
 
     The printed output will be:
@@ -91,8 +133,8 @@
     @param task_id: a string task_id to form the swarming url.
     """
     annotations = autotest.chromite_load('buildbot_annotations')
-    print(annotations.StepLink(
-            text, swarming_lib.get_task_link(task_id)))
+    print(annotations.StepLink('[Test-logs]: %s' % text,
+                               swarming_lib.get_stainless_logs_link(task_id)))
 
 
 def get_task_id_for_task_summaries(task_id):
@@ -137,19 +179,50 @@
         # finishes and claims that it succeeds. Skip logging them in buildbot.
         return
 
+    failed_results = [t for t in test_results if _is_failed_result(t)]
+    if suite_handler.is_provision():
+        _log_buildbot_links_for_provision_tasks(failed_results)
+    else:
+        _log_buildbot_links_for_tasks(failed_results)
+
+
+def _log_buildbot_links_for_provision_tasks(test_results):
+    for result in test_results:
+        _print_task_result_link_annotation(result['task_ids'][0],
+                                           _get_show_test_name(result))
+
+
+def _log_buildbot_links_for_tasks(test_results):
+    task_ids = []
+    for result in test_results:
+        task_ids += result.get('task_ids', [])
+    failed_test_views = _get_failed_test_views_from_tko(task_ids)
+
+    for result in test_results:
+        task_id = result['task_ids'][0]
+        test_name = result['test_name']
+        if task_id in failed_test_views:
+            for v in failed_test_views[task_id]:
+                _print_task_result_link_annotation(task_id,
+                                                   _reason_from_test_view(v))
+        else:
+            _print_task_result_link_annotation(task_id, test_name)
+        _log_buildbot_links_for_test_history(task_id, test_name)
+
+
+def _log_buildbot_links_for_test_history(task_id, test_name):
     annotations = autotest.chromite_load('buildbot_annotations')
     reporting_utils = autotest.load('server.cros.dynamic_suite.reporting_utils')
-    for result in test_results:
-        if result['state'] not in [swarming_lib.TASK_COMPLETED_SUCCESS,
-                                   swarming_lib.TASK_RUNNING]:
-            _print_task_link_annotation(
-                    result['task_ids'][0],
-                    '[Test-logs]: %s' % _get_show_test_name(result))
+    print(annotations.StepLink(
+            '[Test-History]: %s' % test_name,
+            reporting_utils.link_test_history(test_name)))
 
-            if not suite_handler.is_provision():
-                print(annotations.StepLink(
-                        '[Test-History]: %s' % result['test_name'],
-                        reporting_utils.link_test_history(result['test_name'])))
+
+def _reason_from_test_view(test_view):
+    reason = '%s: %s' % (test_view.name, test_view.status)
+    if test_view.reason:
+        reason = '%s: %s' % (reason, test_view.reason)
+    return reason
 
 
 def _log_test_results(test_results):
@@ -193,7 +266,7 @@
     for idx, task_id in enumerate(result['task_ids']):
         retry_suffix = ' (%dth retry)' % idx if idx > 0 else ''
         anchor_test += retry_suffix
-        _print_task_link_annotation(
+        _print_task_result_link_annotation(
                 task_id,
                 '[%s]: %s' % (anchor_test, result['state']))
 
@@ -295,13 +368,13 @@
             run_suite_common.RETURN_CODES.OK)
 
 
-def _log_test_links(child_test_results):
+def _log_test_result_links(child_test_results):
     """Output child results for a suite."""
     for result in child_test_results:
         for idx, task_id in enumerate(result['task_ids']):
             retry_suffix = ' (%dth retry)' % idx if idx > 0 else ''
             logging.info('%s  %s', result['test_name'] + retry_suffix,
-                         swarming_lib.get_task_link(task_id))
+                         swarming_lib.get_stainless_logs_link(task_id))
 
 
 def setup_logging():
@@ -323,3 +396,11 @@
         },
         'disable_existing_loggers': False,
     })
+
+
+def _is_failed_result(result):
+    return result['state'] not in [
+            swarming_lib.TASK_COMPLETED_SUCCESS,
+            swarming_lib.TASK_RUNNING,
+    ]
+
diff --git a/venv/skylab_suite/swarming_lib.py b/venv/skylab_suite/swarming_lib.py
index db9aa8d..7c0575e 100644
--- a/venv/skylab_suite/swarming_lib.py
+++ b/venv/skylab_suite/swarming_lib.py
@@ -8,7 +8,6 @@
 from __future__ import division
 from __future__ import print_function
 
-import collections
 import json
 import logging
 import operator
@@ -20,7 +19,8 @@
 from skylab_suite import errors
 
 
-SERVICE_ACCOUNT = '/creds/skylab_swarming_bot/skylab_bot_service_account.json'
+DEFAULT_SERVICE_ACCOUNT = (
+        '/creds/skylab_swarming_bot/skylab_bot_service_account.json')
 SKYLAB_DRONE_POOL = 'ChromeOSSkylab'
 SKYLAB_SUITE_POOL = 'ChromeOSSkylab-suite'
 
@@ -71,43 +71,12 @@
 
 SWARMING_DUT_READY_STATUS = 'ready'
 
-# The structure of fallback swarming task request is:
-# NewTaskRequest:
-#     ...
-#     task_slices  ->  NewTaskSlice:
-#                          ...
-#                          properties  ->  TaskProperties
-#                                              ...
-TaskProperties = collections.namedtuple(
-        'TaskProperties',
-        [
-                'command',
-                'dimensions',
-                'execution_timeout_secs',
-                'grace_period_secs',
-                'io_timeout_secs',
-        ])
+_STAINLESS_LOGS_BROWSER_URL_TEMPLATE = (
+        "https://stainless.corp.google.com"
+        "/browse/chromeos-autotest-results/swarming-%(request_id)s/"
+)
 
-NewTaskSlice = collections.namedtuple(
-        'NewTaskSlice',
-        [
-                'expiration_secs',
-                'properties',
-        ])
-
-NewTaskRequest = collections.namedtuple(
-        'NewTaskRequest',
-        [
-                'name',
-                'parent_task_id',
-                'priority',
-                'tags',
-                'user',
-                'task_slices',
-        ])
-
-
-def _get_client():
+def _get_client_path():
     return os.path.join(
             os.path.expanduser('~'),
             'chromiumos/chromite/third_party/swarming.client/swarming.py')
@@ -138,12 +107,6 @@
     return dependencies
 
 
-def get_basic_swarming_cmd(command):
-    return [_get_client(), command,
-            '--auth-service-account-json', SERVICE_ACCOUNT,
-            '--swarming', get_swarming_server()]
-
-
 def make_logdog_annotation_url():
     """Return a unique LogDog annotation URL.
 
@@ -181,63 +144,6 @@
         )
 
 
-def get_new_task_swarming_cmd():
-    """Return a list of command args for creating a new task."""
-    return get_basic_swarming_cmd('post') + ['tasks/new']
-
-
-def make_fallback_request_dict(cmds, slices_dimensions, slices_expiration_secs,
-                               task_name, priority, tags, user,
-                               parent_task_id='',
-                               expiration_secs=DEFAULT_EXPIRATION_SECS,
-                               grace_period_secs=DEFAULT_TIMEOUT_SECS,
-                               execution_timeout_secs=DEFAULT_TIMEOUT_SECS,
-                               io_timeout_secs=DEFAULT_TIMEOUT_SECS):
-    """Form a json-compatible dict for fallback swarming call.
-
-    @param cmds: A list of cmd to run on swarming bots.
-    @param slices_dimensions: A list of dict to indicates different tries'
-        dimensions.
-    @param slices_expiration_secs: A list of Integer to indicates each slice's
-        expiration_secs.
-    @param task_name: The request's name.
-    @param priority: The request's priority. An integer.
-    @param grace_period_secs: The seconds to send a task after a SIGTERM before
-        sending it a SIGKILL.
-    @param execution_timeout_secs: The seconds to run before a task gets
-        terminated.
-    @param io_timeout_secs: The seconds to wait before a task is considered
-        hung.
-
-    @return a json-compatible dict, as a request for swarming call.
-    """
-    assert len(cmds) == len(slices_dimensions)
-    assert len(cmds) == len(slices_expiration_secs)
-    task_slices = []
-    for cmd, dimensions, expiration_secs in zip(cmds, slices_dimensions,
-                                                slices_expiration_secs):
-        properties = TaskProperties(
-                command=cmd,
-                dimensions=dimensions,
-                execution_timeout_secs=execution_timeout_secs,
-                grace_period_secs=grace_period_secs,
-                io_timeout_secs=io_timeout_secs)
-        task_slices.append(
-                NewTaskSlice(
-                        expiration_secs=expiration_secs,
-                        properties=properties))
-
-    task_request = NewTaskRequest(
-        name=task_name,
-        parent_task_id=parent_task_id,
-        priority=priority,
-        tags=tags,
-        user=user,
-        task_slices=task_slices)
-
-    return _to_raw_request(task_request)
-
-
 def _namedtuple_to_dict(value):
     """Recursively converts a namedtuple to a dict.
 
@@ -263,30 +169,16 @@
     return out
 
 
-def _to_raw_request(request):
-    """Returns the json-compatible dict expected by the server.
-
-    Args:
-      request: a NewTaskRequest object.
-
-    Returns:
-      A json-compatible dict, which could be parsed by swarming proxy
-      service.
-    """
-    out = _namedtuple_to_dict(request)
-    for task_slice in out['task_slices']:
-        task_slice['properties']['dimensions'] = [
-                {'key': k, 'value': v}
-                for k, v in task_slice['properties']['dimensions'].iteritems()
-        ]
-        task_slice['properties']['dimensions'].sort(key=lambda x: x['key'])
-    return out
-
-
 def get_task_link(task_id):
     return '%s/user/task/%s' % (os.environ.get('SWARMING_SERVER'), task_id)
 
 
+def get_stainless_logs_link(request_id):
+    """Gets a link to the stainless logs for a given task ID.
+
+    @param request_id: A Swarming task request ID, interpolated into the
+        URL template.
+
+    @return the stainless logs browser URL for the request.
+    """
+    # NOTE(review): _STAINLESS_LOGS_BROWSER_URL_TEMPLATE is defined
+    # elsewhere in this module and is expected to contain a
+    # %(request_id)s placeholder -- confirm.
+    return _STAINLESS_LOGS_BROWSER_URL_TEMPLATE % {
+            'request_id': request_id,
+    }
+
 def get_task_final_state(task):
     """Get the final state of a swarming task.
 
@@ -311,96 +203,6 @@
 
     return ''
 
-
-def query_bots_count(dimensions):
-    """Get bots count for given requirements.
-
-    @param dimensions: A dict of dimensions for swarming bots.
-
-    @return a dict, which contains counts for different status of bots.
-    """
-    basic_swarming_cmd = get_basic_swarming_cmd('query')
-    conditions = [('dimensions', '%s:%s' % (k, v))
-                  for k, v in dimensions.iteritems()]
-    swarming_cmd = basic_swarming_cmd + ['bots/count?%s' %
-                                         urllib.urlencode(conditions)]
-    cros_build_lib = autotest.chromite_load('cros_build_lib')
-    result = cros_build_lib.RunCommand(swarming_cmd, capture_output=True)
-    return json.loads(result.output)
-
-
-def get_idle_bots_count(outputs):
-    """Get the idle bots count.
-
-    @param outputs: The outputs of |query_bots_count|.
-    """
-    return (int(outputs['count']) - int(outputs['busy']) - int(outputs['dead'])
-            - int(outputs['quarantined']))
-
-
-def query_task_by_tags(tags):
-    """Get tasks for given tags.
-
-    @param tags: A dict of tags for swarming tasks.
-
-    @return a list, which contains all tasks queried by the given tags.
-    """
-    basic_swarming_cmd = get_basic_swarming_cmd('query')
-    conditions = [('tags', '%s:%s' % (k, v)) for k, v in tags.iteritems()]
-    swarming_cmd = basic_swarming_cmd + ['tasks/list?%s' %
-                                         urllib.urlencode(conditions)]
-    cros_build_lib = autotest.chromite_load('cros_build_lib')
-    result = cros_build_lib.RunCommand(swarming_cmd, capture_output=True)
-    json_output = json.loads(result.output)
-    return json_output.get('items', [])
-
-
-def query_task_by_id(task_id):
-    """Get task for given id.
-
-    @param task_id: A string to indicate a swarming task id.
-
-    @return a dict, which contains the task with the given task_id.
-    """
-    basic_swarming_cmd = get_basic_swarming_cmd('query')
-    swarming_cmd = basic_swarming_cmd + ['task/%s/result' % task_id]
-    cros_build_lib = autotest.chromite_load('cros_build_lib')
-    result = cros_build_lib.RunCommand(swarming_cmd, capture_output=True)
-    return json.loads(result.output)
-
-
-def abort_task(task_id):
-    """Abort a swarming task by its id.
-
-    @param task_id: A string swarming task id.
-    """
-    basic_swarming_cmd = get_basic_swarming_cmd('cancel')
-    swarming_cmd = basic_swarming_cmd + ['--kill-running', task_id]
-    cros_build_lib = autotest.chromite_load('cros_build_lib')
-    try:
-        cros_build_lib.RunCommand(swarming_cmd, log_output=True)
-    except cros_build_lib.RunCommandError:
-        logging.error('Task %s probably already gone, skip canceling it.',
-                      task_id)
-
-
-def query_bots_list(dimensions):
-    """Get bots list for given requirements.
-
-    @param dimensions: A dict of dimensions for swarming bots.
-
-    @return a list of bot dicts.
-    """
-    basic_swarming_cmd = get_basic_swarming_cmd('query')
-    conditions = [('dimensions', '%s:%s' % (k, v))
-                  for k, v in dimensions.iteritems()]
-    swarming_cmd = basic_swarming_cmd + ['bots/list?%s' %
-                                         urllib.urlencode(conditions)]
-    cros_build_lib = autotest.chromite_load('cros_build_lib')
-    result = cros_build_lib.RunCommand(swarming_cmd, capture_output=True)
-    return json.loads(result.output).get('items', [])
-
-
 def bot_available(bot):
     """Check whether a bot is available.
 
@@ -412,19 +214,94 @@
     return not (bot['is_dead'] or bot['quarantined'])
 
 
-def get_child_tasks(parent_task_id):
-    """Get the child tasks based on a parent swarming task id.
+class Client(object):
+    """Wrapper for interacting with swarming client."""
 
-    @param parent_task_id: The parent swarming task id.
+    # TODO(akeshet): Drop auth_json_path argument and use the same
+    # SWARMING_CREDS envvar that is used to select creds for skylab tool.
+    def __init__(self, auth_json_path=DEFAULT_SERVICE_ACCOUNT):
+        """@param auth_json_path: Path to a service account credentials
+                JSON file, passed to the swarming client via
+                --auth-service-account-json. May be falsy to omit the
+                auth flags (see get_basic_swarming_cmd).
+        """
+        self._auth_json_path = auth_json_path
 
-    @return a list of dicts, each dict refers to the whole stats of a task,
-        keys include 'name', 'bot_dimensions', 'tags', 'bot_id', 'state', etc.
-    """
-    swarming_cmd = get_basic_swarming_cmd('query')
-    swarming_cmd += ['tasks/list?tags=parent_task_id:%s' % parent_task_id]
-    timeout_util = autotest.chromite_load('timeout_util')
-    cros_build_lib = autotest.chromite_load('cros_build_lib')
-    with timeout_util.Timeout(60):
-        child_tasks = cros_build_lib.RunCommand(
-                swarming_cmd, capture_output=True)
-        return json.loads(child_tasks.output)['items']
+    def query_task_by_tags(self, tags):
+        """Get tasks for given tags.
+
+        @param tags: A dict of tags for swarming tasks.
+
+        @return a list, which contains all tasks queried by the given tags.
+        """
+        basic_swarming_cmd = self.get_basic_swarming_cmd('query')
+        conditions = [('tags', '%s:%s' % (k, v)) for k, v in tags.iteritems()]
+        swarming_cmd = basic_swarming_cmd + ['tasks/list?%s' %
+                                            urllib.urlencode(conditions)]
+        cros_build_lib = autotest.chromite_load('cros_build_lib')
+        result = cros_build_lib.RunCommand(swarming_cmd, capture_output=True)
+        json_output = json.loads(result.output)
+        return json_output.get('items', [])
+
+    def query_task_by_id(self, task_id):
+        """Get task for given id.
+
+        @param task_id: A string to indicate a swarming task id.
+
+        @return a dict, which contains the task with the given task_id.
+        """
+        basic_swarming_cmd = self.get_basic_swarming_cmd('query')
+        swarming_cmd = basic_swarming_cmd + ['task/%s/result' % task_id]
+        cros_build_lib = autotest.chromite_load('cros_build_lib')
+        result = cros_build_lib.RunCommand(swarming_cmd, capture_output=True)
+        return json.loads(result.output)
+
+    def abort_task(self, task_id):
+        """Abort a swarming task by its id.
+
+        @param task_id: A string swarming task id.
+        """
+        basic_swarming_cmd = self.get_basic_swarming_cmd('cancel')
+        swarming_cmd = basic_swarming_cmd + ['--kill-running', task_id]
+        cros_build_lib = autotest.chromite_load('cros_build_lib')
+        try:
+            cros_build_lib.RunCommand(swarming_cmd, log_output=True)
+        except cros_build_lib.RunCommandError:
+            # Best-effort: a task that already finished cannot be
+            # canceled; log and continue rather than fail the caller.
+            logging.error('Task %s probably already gone, skip canceling it.',
+                          task_id)
+
+    def query_bots_list(self, dimensions):
+        """Get bots list for given requirements.
+
+        @param dimensions: A dict of dimensions for swarming bots.
+
+        @return a list of bot dicts.
+        """
+        basic_swarming_cmd = self.get_basic_swarming_cmd('query')
+        conditions = [('dimensions', '%s:%s' % (k, v))
+                      for k, v in dimensions.iteritems()]
+        swarming_cmd = basic_swarming_cmd + ['bots/list?%s' %
+                                            urllib.urlencode(conditions)]
+        cros_build_lib = autotest.chromite_load('cros_build_lib')
+        result = cros_build_lib.RunCommand(swarming_cmd, capture_output=True)
+        return json.loads(result.output).get('items', [])
+
+    def get_child_tasks(self, parent_task_id):
+        """Get the child tasks based on a parent swarming task id.
+
+        @param parent_task_id: The parent swarming task id.
+
+        @return a list of dicts, each dict refers to the whole stats of a task,
+            keys include 'name', 'bot_dimensions', 'tags', 'bot_id', 'state',
+            etc.
+        """
+        swarming_cmd = self.get_basic_swarming_cmd('query')
+        swarming_cmd += ['tasks/list?tags=parent_task_id:%s' % parent_task_id]
+        timeout_util = autotest.chromite_load('timeout_util')
+        cros_build_lib = autotest.chromite_load('cros_build_lib')
+        # Bound the swarming query to 60 seconds so a hung client does
+        # not block the caller indefinitely.
+        with timeout_util.Timeout(60):
+            child_tasks = cros_build_lib.RunCommand(
+                    swarming_cmd, capture_output=True)
+            return json.loads(child_tasks.output)['items']
+
+    def get_basic_swarming_cmd(self, command):
+        """Build the base swarming client invocation for |command|.
+
+        @param command: A swarming client subcommand, e.g. 'query' or
+            'cancel'.
+
+        @return a list of command line arguments: the client path, the
+            subcommand, the --swarming server flag and, when an auth JSON
+            path is configured, the --auth-service-account-json flag.
+        """
+        cmd = [_get_client_path(), command, '--swarming', get_swarming_server()]
+        if self._auth_json_path:
+            cmd += ['--auth-service-account-json', self._auth_json_path]
+        return cmd
+
diff --git a/venv/skylab_suite/swarming_lib_unittest.py b/venv/skylab_suite/swarming_lib_unittest.py
deleted file mode 100644
index 2d60e96..0000000
--- a/venv/skylab_suite/swarming_lib_unittest.py
+++ /dev/null
@@ -1,68 +0,0 @@
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-
-from skylab_suite import swarming_lib
-
-
-def test_form_requests():
-    """Test raw requests for swarming API."""
-    task_name = 'provision_task'
-    parent_task_id = 'fake_parent_task_id'
-    priority = 70
-    tags = ['parent_task_id:fake_id']
-    tags.append('task_name:%s' % task_name)
-    user = 'skylab_suite'
-    fallback_dimensions = {'pool': 'ChromeOSSkylab'}
-    normal_dimensions = fallback_dimensions.copy()
-    normal_dimensions['provisionable-cros-version'] = (
-            'lumpy-release/R65-10323.58.0')
-    cmds = [['python', '-c', 'print("first")'],
-            ['python', '-c', 'print("second")']]
-    dimensions = [normal_dimensions, fallback_dimensions]
-    expiration_secs = swarming_lib.DEFAULT_EXPIRATION_SECS
-    timeout_secs = swarming_lib.DEFAULT_TIMEOUT_SECS
-    slice_expiration_secs = [expiration_secs, expiration_secs]
-
-    source_request = {
-            'name': task_name,
-            'parent_task_id': parent_task_id,
-            'priority': priority,
-            'tags': tags,
-            'user': user,
-            'task_slices': [
-                    {'expiration_secs': expiration_secs,
-                     'properties': {
-                            'command': cmds[0],
-                            'dimensions': [
-                                    {'key': 'pool',
-                                     'value': 'ChromeOSSkylab'},
-                                    {'key': 'provisionable-cros-version',
-                                     'value': 'lumpy-release/R65-10323.58.0'},
-                            ],
-                            'grace_period_secs': timeout_secs,
-                            'execution_timeout_secs': timeout_secs,
-                            'io_timeout_secs': timeout_secs,
-                     }},
-                    {'expiration_secs': expiration_secs,
-                     'properties': {
-                             'command': cmds[1],
-                             'dimensions': [
-                                    {'key': 'pool',
-                                     'value': 'ChromeOSSkylab'},
-                             ],
-                             'grace_period_secs': timeout_secs,
-                             'execution_timeout_secs': timeout_secs,
-                             'io_timeout_secs': timeout_secs,
-                     }},
-            ],
-    }
-
-    json_request = swarming_lib.make_fallback_request_dict(
-            cmds, dimensions, slice_expiration_secs, task_name, priority,
-            tags, user, parent_task_id=parent_task_id)
-    assert json_request == source_request
diff --git a/venv/skylab_suite/tko_test_views.py b/venv/skylab_suite/tko_test_views.py
new file mode 100644
index 0000000..4a67cc1
--- /dev/null
+++ b/venv/skylab_suite/tko_test_views.py
@@ -0,0 +1,209 @@
+# Copyright 2019 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+'''Utilities to summarize TKO results reported by tests in the suite.'''
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import argparse
+import collections
+import contextlib
+import mysql.connector
+
+class Error(Exception):
+    """Error detected in this script."""
+
+
+# Row corresponds to a single row of tko_test_view_2 table in AFE DB, but
+# contains only a subset of the columns in the table.
+Row = collections.namedtuple(
+        'Row',
+        # name: test name; status: TKO status word (see _get_status_map,
+        # e.g. 'FAIL'); reason: the failure reason column.
+        'name, status, reason'
+)
+
+
+def get(conn, task_ids):
+    """Get tko_test_view_2 Row()s for given skylab task_ids.
+
+    The connection is consumed by this call: it is closed in all cases,
+    so callers must not reuse |conn| afterwards.
+
+    @param conn: A MySQL connection to TKO.
+    @param task_ids: list of Skylab task request IDs to collect test views for.
+    @return: {task_id: [Row(...)...]}
+    """
+    try:
+        # Map request IDs to TKO job indices, then invert so rows (keyed
+        # by job_idx) can be mapped back to request IDs.
+        # NOTE(review): the inversion assumes each task maps to a distinct
+        # tko job_idx; a collision would silently drop a task -- confirm.
+        task_job_ids = _get_job_idxs_from_tko(conn, task_ids)
+        job_task_ids = {v: k for k, v in task_job_ids.iteritems()}
+        job_rows = _get_rows_from_tko(conn, job_task_ids.keys())
+        return {job_task_ids[k]: v for k, v in job_rows.iteritems()}
+    finally:
+        conn.close()
+
+
+def filter_failed(rows):
+    """Filter down given list of test_views Row() to failed tests.
+
+    @param rows: iterable of Row().
+    @return list of Row() whose status is in _BAD_STATUSES
+        ('ABORT', 'ERROR', 'FAIL').
+    """
+    return [r for r in rows if r.status in _BAD_STATUSES]
+
+
+def main():
+    '''Entry-point to use this script standalone.'''
+    parser = argparse.ArgumentParser(
+            description='Summarize TKO results for a Skylab task')
+    parser.add_argument(
+            '--task-id',
+            action='append',
+            help='Swarming request ID for the skylab task (may be repeated)',
+    )
+    parser.add_argument(
+            '--host',
+            required=True,
+            help='TKO host IP',
+    )
+    parser.add_argument(
+            '--port',
+            type=int,
+            default=3306,
+            help='TKO port',
+    )
+    parser.add_argument(
+            '--user',
+            required=True,
+            help='TKO MySQL user',
+    )
+    parser.add_argument(
+            '--password',
+            required=True,
+            # NOTE(review): passing a secret via argv exposes it in `ps`;
+            # consider an env var or option file -- confirm deployment model.
+            help='TKO MySQL password',
+    )
+    args = parser.parse_args()
+    if not args.task_id:
+        raise Error('Must request at least one --task-id')
+
+    conn = mysql.connector.connect(
+            host=args.host,
+            port=args.port,
+            user=args.user,
+            password=args.password,
+            database='chromeos_autotest_db',
+    )
+    # get() closes |conn| before returning, so no cleanup is needed here.
+    views = get(conn, args.task_id)
+    # Print only the failed test views for each requested task.
+    for task_id, rows in views.iteritems():
+        print('Task ID: %s' % task_id)
+        for row in filter_failed(rows):
+            print('  %s in status %s' % (row.name, row.status))
+            print('    reason: %s' % (row.reason,))
+        print('')
+
+
+# TKO status words treated as a test failure by filter_failed().
+_BAD_STATUSES = {
+        'ABORT',
+        'ERROR',
+        'FAIL',
+}
+
+
+def _get_rows_from_tko(conn, tko_job_ids):
+    """Get a list of Row() for the given TKO job IDs.
+
+    @param conn: A MySQL connection.
+    @param tko_job_ids: List of tko job_idx values to get Row()s for.
+    @return: {tko_job_id: [Row]}
+    """
+    # Guard: an empty id list would render the query as "IN ()", which is
+    # invalid SQL.
+    if not tko_job_ids:
+        return {}
+
+    job_rows = collections.defaultdict(list)
+    statuses = _get_status_map(conn)
+
+    _GET_TKO_TEST_VIEW_2 = """
+    SELECT job_idx, test_name, status_idx, reason FROM tko_test_view_2
+            WHERE invalid = 0 AND job_idx IN (%s)
+    """
+    # Expand one parameterized placeholder per job id; the values
+    # themselves are still bound by cursor.execute (no SQL injection).
+    q = _GET_TKO_TEST_VIEW_2 % ', '.join(['%s'] * len(tko_job_ids))
+    with _cursor(conn) as cursor:
+        cursor.execute(q, tko_job_ids)
+        for job_idx, name, s_idx, reason in cursor.fetchall():
+            job_rows[job_idx].append(
+                    Row(name, statuses.get(s_idx, 'UNKNOWN'), reason))
+    return dict(job_rows)
+
+
+def _get_job_idxs_from_tko(conn, task_ids):
+    """Get tko_job_idx for given task_ids.
+
+    Task execution reports the run ID to TKO, but Skylab clients only knows the
+    request ID of the created task.
+    Swarming executes a task with increasing run IDs, retrying on bot failure.
+    If a task is retried after the point where TKO results are reported, this
+    function returns the TKO job_idx corresponding to the last completed
+    attempt.
+
+    @param conn: MySQL connection to TKO.
+    @param task_ids: List of task request IDs to get TKO job IDs for.
+    @return {task_id: job_id}
+    """
+    task_runs = {}
+    run_ids = []
+    for task_id in task_ids:
+        # Keep each task's own run IDs separate. The previous version
+        # snapshotted the *accumulated* run_ids list for every task, so a
+        # task whose runs were absent from TKO could fall through to a
+        # previous task's run below and be attributed the wrong job_idx.
+        task_run_ids = _run_ids_for_request(task_id)
+        run_ids += task_run_ids
+        # Latest run first, so the lookup below picks the last attempt.
+        task_runs[task_id] = list(reversed(task_run_ids))
+    run_job_idxs = _get_job_idxs_for_run_ids(conn, run_ids)
+
+    task_job_idxs = {}
+    for task_id, task_run_ids in task_runs.iteritems():
+        for run_id in task_run_ids:
+            if run_id in run_job_idxs:
+                task_job_idxs[task_id] = run_job_idxs[run_id]
+                break
+    return task_job_idxs
+
+
+def _get_job_idxs_for_run_ids(conn, run_ids):
+    """Get tko_job_idx for a given task run_ids.
+
+    @param conn: MySQL connection to TKO.
+    @param run_ids: List of task run IDs to get TKO job IDs for.
+    @return {run_id: job_id}
+    @raise Error: if a run ID has multiple tko_task_references rows.
+    """
+    # Guard: an empty id list would render the query as "IN ()", which is
+    # invalid SQL.
+    if not run_ids:
+        return {}
+
+    _GET_TKO_JOB_Q = """
+    SELECT task_id, tko_job_idx FROM tko_task_references
+            WHERE reference_type = "skylab" AND task_id IN (%s)
+    """
+    # One bound placeholder per run id; values are bound by cursor.execute.
+    q = _GET_TKO_JOB_Q % ', '.join(['%s'] * len(run_ids))
+
+    job_idxs = {}
+    with _cursor(conn) as cursor:
+        cursor.execute(q, run_ids)
+        for run_id, tko_job_idx in cursor.fetchall():
+            if run_id in job_idxs:
+                raise Error('task run ID %s has multiple tko references' %
+                            (run_id,))
+            job_idxs[run_id] = tko_job_idx
+    return job_idxs
+
+
+def _get_status_map(conn):
+    """Fetch the status index -> word mapping from the tko_status table.
+
+    @param conn: MySQL connection to TKO.
+    @return {status_idx: word}, used to translate tko_test_view_2's
+        status_idx column into a status string.
+    """
+    statuses = {}
+    with _cursor(conn) as cursor:
+        cursor.execute('SELECT status_idx, word FROM tko_status')
+        r = cursor.fetchall()
+        for idx, word in r:
+            statuses[idx] = word
+    return statuses
+
+
+def _run_ids_for_request(request_id):
+    """Return Swarming run IDs for a given request ID, in ascending order.
+
+    @param request_id: A Swarming task request ID.
+    @return list of candidate run IDs derived from the request ID.
+    """
+    # A run ID shares the request ID's prefix and differs only in the
+    # final character. NOTE(review): this assumes at most two run
+    # attempts per request (suffixes '1' and '2') -- confirm against
+    # swarming's retry behavior.
+    prefix = request_id[:len(request_id)-1]
+    return [prefix + i for i in ('1', '2')]
+
+
+@contextlib.contextmanager
+def _cursor(conn):
+    """Context manager yielding a cursor on |conn|, closed on exit.
+
+    @param conn: A MySQL connection.
+    """
+    c = conn.cursor()
+    try:
+        yield c
+    finally:
+        c.close()
+
+
+if __name__ == '__main__':
+    # Consistency fix: the rest of the file uses 4-space indentation.
+    main()