Merge to upstream 691026242b81aceac4805322e2a51cb5ccbb6766
diff --git a/bootstrap/brillo b/bootstrap/brillo
deleted file mode 120000
index 72196ce..0000000
--- a/bootstrap/brillo
+++ /dev/null
@@ -1 +0,0 @@
-../scripts/wrapper.py
\ No newline at end of file
diff --git a/bootstrap/scripts/brillo.py b/bootstrap/scripts/brillo.py
deleted file mode 100644
index 1841a47..0000000
--- a/bootstrap/scripts/brillo.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Bootstrap wrapper for 'brillo' command.
-
-For most commands of the form "brillo XYZ", we reinvoke
-REPO_DIR/chromite/bin/brillo XYZ, after detecting REPO_DIR based on the CWD.
-
-For the "brillo sdk" command, we reinvoke "../bin/brillo sdk" from the current
-git repository. This allows the SDK command to be run, even if there is no repo
-checkout.
-"""
-
-from __future__ import print_function
-
-import os
-
-from chromite.lib import bootstrap_lib
-from chromite.lib import cros_build_lib
-from chromite.lib import git
-from chromite.lib import workspace_lib
-
-
-def LocateBrilloCommand(args):
-  bootstrap_path = bootstrap_lib.FindBootstrapPath(save_to_env=True)
-
-  if len(args) >= 1 and args[0] == 'sdk':
-    if not bootstrap_path:
-      cros_build_lib.Die(
-          'You are bootstrapping chromite from a repo checkout.\n'
-          'You must use a git clone. (brbug.com/580: link docs)')
-
-    # Run 'brillo sdk' from the repository containing this command.
-    return os.path.join(bootstrap_path, 'bin', 'brillo')
-
-  # If we are in a workspace, and the workspace has an associated SDK, use it.
-  workspace_path = workspace_lib.WorkspacePath()
-  if workspace_path:
-    sdk_path = bootstrap_lib.GetActiveSdkPath(bootstrap_path, workspace_path)
-    if not sdk_path:
-      cros_build_lib.Die(
-          'The current workspace has no valid SDK.\n'
-          'Please run "brillo sdk --update" (brbug.com/580: link docs)')
-
-    # Use SDK associated with workspace, or nothing.
-    return os.path.join(sdk_path, 'chromite', 'bin', 'brillo')
-
-  # Run all other commands from 'brillo' wrapper in repo detected via CWD.
-  repo_path = git.FindRepoCheckoutRoot(os.getcwd())
-  if repo_path:
-    return os.path.join(repo_path, 'chromite', 'bin', 'brillo')
-
-  # Couldn't find the real brillo command to run.
-  cros_build_lib.Die('Unable to detect which SDK you want to use.')
-
-def main(args):
-  bin_cmd = LocateBrilloCommand(args)
-  os.execv(bin_cmd, [bin_cmd] + args)
diff --git a/bootstrap/scripts/brillo_unittest.py b/bootstrap/scripts/brillo_unittest.py
deleted file mode 100644
index c4c5226..0000000
--- a/bootstrap/scripts/brillo_unittest.py
+++ /dev/null
@@ -1,176 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Test the bootstrap brillo command."""
-
-from __future__ import print_function
-
-import mock
-import os
-
-from chromite.lib import cros_build_lib
-from chromite.lib import cros_test_lib
-from chromite.lib import git
-
-from chromite.bootstrap.scripts import brillo
-
-
-class TestBootstrapBrilloCmd(cros_test_lib.WorkspaceTestCase):
-  """Tests for the bootstrap brillo command."""
-
-  def setUp(self):
-    # Make certain we never exec anything.
-    self.mock_exec = self.PatchObject(os, 'execv', autospec=True)
-
-    self.mock_repo_root = self.PatchObject(
-        git, 'FindRepoCheckoutRoot', autospec=True)
-
-  def _verifyLocateBrilloCommand(self, expected):
-    self.assertEqual(expected,
-                     brillo.LocateBrilloCommand(['flash']))
-    self.assertEqual(expected,
-                     brillo.LocateBrilloCommand(['flash', '--help']))
-
-  def _verifyLocateBrilloCommandSdkHandling(self, expected):
-    self.assertEqual(expected,
-                     brillo.LocateBrilloCommand(['sdk']))
-    self.assertEqual(expected,
-                     brillo.LocateBrilloCommand(['sdk', '--help']))
-
-  def _verifyLocateBrilloCommandFail(self):
-    with self.assertRaises(cros_build_lib.DieSystemExit):
-      brillo.LocateBrilloCommand(['flash'])
-
-  def _verifyLocateBrilloCommandSdkFail(self):
-    with self.assertRaises(cros_build_lib.DieSystemExit):
-      brillo.LocateBrilloCommand(['sdk'])
-
-  def testCommandLookupActiveWorkspace(self):
-    """Test that sdk commands are run in the Git Repository."""
-    self.CreateBootstrap('1.2.3')
-    self.CreateWorkspace('1.2.3')
-
-    sdk_wrapper = os.path.join(
-        self.bootstrap_path, 'sdk_checkouts/1.2.3/chromite/bin/brillo')
-    bootstrap_wrapper = os.path.join(self.bootstrap_path, 'bin/brillo')
-
-    # We are not inside a repo.
-    self.mock_repo_root.return_value = None
-
-    self._verifyLocateBrilloCommand(sdk_wrapper)
-    self._verifyLocateBrilloCommandSdkHandling(bootstrap_wrapper)
-
-    # We are inside a repo, shouldn't affect the result.
-    self.mock_repo_root.return_value = '/repo'
-
-    self._verifyLocateBrilloCommand(sdk_wrapper)
-    self._verifyLocateBrilloCommandSdkHandling(bootstrap_wrapper)
-
-  def testCommandLookupInactiveWorkspace(self):
-    """Test that sdk commands are run in the Git Repository."""
-    self.CreateBootstrap()
-    self.CreateWorkspace()
-    self.mock_repo_root.return_value = None
-
-    bootstrap_wrapper = os.path.join(self.bootstrap_path, 'bin/brillo')
-
-    self._verifyLocateBrilloCommandFail()
-    self._verifyLocateBrilloCommandSdkHandling(bootstrap_wrapper)
-
-    # Having a repo root shouldn't affect the result.
-    self.mock_repo_root.return_value = '/repo'
-
-    self._verifyLocateBrilloCommandFail()
-    self._verifyLocateBrilloCommandSdkHandling(bootstrap_wrapper)
-
-  def testCommandLookupRepoFromBootstrap(self):
-    """Test that sdk commands are run in the Git Repository."""
-    self.CreateBootstrap('1.2.3')
-    self.CreateWorkspace()
-    self.mock_workspace_path.return_value = None
-    self.mock_repo_root.return_value = '/repo'
-
-    bootstrap_wrapper = os.path.join(self.bootstrap_path, 'bin/brillo')
-    repo_wrapper = '/repo/chromite/bin/brillo'
-
-    self._verifyLocateBrilloCommand(repo_wrapper)
-    self._verifyLocateBrilloCommandSdkHandling(bootstrap_wrapper)
-
-  def testCommandLookupBootstrapOnly(self):
-    """Test that sdk commands are run in the Git Repository."""
-    self.CreateBootstrap('1.2.3')
-    self.CreateWorkspace()
-    self.mock_workspace_path.return_value = None
-    self.mock_repo_root.return_value = None
-
-    bootstrap_wrapper = os.path.join(self.bootstrap_path, 'bin/brillo')
-
-    self._verifyLocateBrilloCommandFail()
-    self._verifyLocateBrilloCommandSdkHandling(bootstrap_wrapper)
-
-  def testCommandLookupRepoOnly(self):
-    """Test that sdk commands are run in the Git Repository."""
-    self.CreateBootstrap('1.2.3')
-    self.CreateWorkspace()
-    self.mock_bootstrap_path.return_value = None
-    self.mock_workspace_path.return_value = None
-    self.mock_repo_root.return_value = '/repo'
-
-    repo_wrapper = '/repo/chromite/bin/brillo'
-
-    self._verifyLocateBrilloCommand(repo_wrapper)
-    self._verifyLocateBrilloCommandSdkFail()
-
-  def testMainInActiveWorkspace(self):
-    self.CreateBootstrap('1.2.3')
-    self.CreateWorkspace('1.2.3')
-    self.mock_repo_root.return_value = None
-
-    brillo.main(['flash', '--help'])
-
-    expected_cmd = os.path.join(
-        self.bootstrap_path, 'sdk_checkouts/1.2.3/chromite/bin/brillo')
-
-    self.assertEqual(
-        [mock.call(expected_cmd, [expected_cmd, 'flash', '--help'])],
-        self.mock_exec.call_args_list)
-
-  def testMainInRepo(self):
-    self.CreateBootstrap('1.2.3')
-    self.CreateWorkspace('1.2.3')
-    self.mock_workspace_path.return_value = None
-    self.mock_repo_root.return_value = '/repo'
-
-    brillo.main(['flash', '--help'])
-
-    expected_cmd = '/repo/chromite/bin/brillo'
-
-    self.assertEqual(
-        [mock.call(expected_cmd, [expected_cmd, 'flash', '--help'])],
-        self.mock_exec.call_args_list)
-
-  def testMainNoCmd(self):
-    self.CreateBootstrap('1.2.3')
-    self.CreateWorkspace('1.2.3')
-    self.mock_workspace_path.return_value = None
-    self.mock_repo_root.return_value = None
-
-    with self.assertRaises(cros_build_lib.DieSystemExit):
-      brillo.main(['flash', '--help'])
-
-    self.assertEqual([], self.mock_exec.call_args_list)
-
-  def testMainSdkCmd(self):
-    self.CreateBootstrap('1.2.3')
-    self.CreateWorkspace('1.2.3')
-    self.mock_workspace_path.return_value = None
-    self.mock_repo_root.return_value = None
-
-    brillo.main(['sdk', '--help'])
-
-    expected_cmd = os.path.join(self.bootstrap_path, 'bin/brillo')
-
-    self.assertEqual(
-        [mock.call(expected_cmd, [expected_cmd, 'sdk', '--help'])],
-        self.mock_exec.call_args_list)
diff --git a/cbuildbot/archive_lib.py b/cbuildbot/archive_lib.py
index a4cef35..cf8d080 100644
--- a/cbuildbot/archive_lib.py
+++ b/cbuildbot/archive_lib.py
@@ -10,7 +10,6 @@
 
 from chromite.cbuildbot import commands
 from chromite.cbuildbot import config_lib
-from chromite.cbuildbot import constants
 
 from chromite.lib import cros_logging as logging
 from chromite.lib import gs
@@ -46,7 +45,7 @@
   if archive_base:
     return '%s/%s' % (archive_base, bot_id)
   elif remote_trybot or config.gs_path == config_lib.GS_PATH_DEFAULT:
-    return '%s/%s' % (constants.DEFAULT_ARCHIVE_BUCKET, bot_id)
+    return '%s/%s' % (config_lib.GetConfig().params.ARCHIVE_URL, bot_id)
   else:
     return config.gs_path
 
diff --git a/cbuildbot/archive_lib_unittest.py b/cbuildbot/archive_lib_unittest.py
index 96dc2c7..691e7df 100644
--- a/cbuildbot/archive_lib_unittest.py
+++ b/cbuildbot/archive_lib_unittest.py
@@ -121,14 +121,14 @@
   def testRemoteTrybotTrue(self):
     """Test GetBaseUploadURI with no archive base but remote_trybot is True."""
     expected_result = ('%s/trybot-%s' %
-                       (archive_lib.constants.DEFAULT_ARCHIVE_BUCKET,
+                       (config_lib.GetConfig().params.ARCHIVE_URL,
                         DEFAULT_BOT_NAME))
     result = self._GetBaseUploadURI(remote_trybot=True)
     self.assertEqual(expected_result, result)
 
   def testBotIdRemoteTrybotTrue(self):
     expected_result = ('%s/%s' %
-                       (archive_lib.constants.DEFAULT_ARCHIVE_BUCKET,
+                       (config_lib.GetConfig().params.ARCHIVE_URL,
                         self.BOT_ID))
     result = self._GetBaseUploadURI(bot_id=self.BOT_ID, remote_trybot=True)
     self.assertEqual(expected_result, result)
@@ -139,14 +139,14 @@
 
     # Test without bot_id.
     expected_result = ('%s/%s' %
-                       (archive_lib.constants.DEFAULT_ARCHIVE_BUCKET,
+                       (config_lib.GetConfig().params.ARCHIVE_URL,
                         DEFAULT_BOT_NAME))
     result = self._GetBaseUploadURI(remote_trybot=False)
     self.assertEqual(expected_result, result)
 
     # Test with bot_id.
     expected_result = ('%s/%s' %
-                       (archive_lib.constants.DEFAULT_ARCHIVE_BUCKET,
+                       (config_lib.GetConfig().params.ARCHIVE_URL,
                         self.BOT_ID))
     result = self._GetBaseUploadURI(bot_id=self.BOT_ID, remote_trybot=False)
     self.assertEqual(expected_result, result)
diff --git a/cbuildbot/binhost_test.py b/cbuildbot/binhost_test.py
index 0bab437..eb19707 100644
--- a/cbuildbot/binhost_test.py
+++ b/cbuildbot/binhost_test.py
@@ -40,7 +40,7 @@
   # TODO(davidjames): Empty this list.
   BOARDS_WITHOUT_CHROMIUM_PFQS = ['rush_ryu', 'smaug']
 
-  site_config = config_lib.LoadConfigFromFile()
+  site_config = config_lib.GetConfig()
 
   @classmethod
   def setUpClass(cls):
diff --git a/cbuildbot/builders/generic_builders.py b/cbuildbot/builders/generic_builders.py
index 5614774..a882310 100644
--- a/cbuildbot/builders/generic_builders.py
+++ b/cbuildbot/builders/generic_builders.py
@@ -14,6 +14,7 @@
 
 from chromite.cbuildbot import constants
 from chromite.cbuildbot import failures_lib
+from chromite.cbuildbot import manifest_version
 from chromite.cbuildbot import results_lib
 from chromite.cbuildbot import trybot_patch_pool
 from chromite.cbuildbot.stages import build_stages
@@ -152,9 +153,11 @@
   def GetVersionInfo(self):
     """Returns a manifest_version.VersionInfo object for this build.
 
-    Subclasses must override this method.
+    Chrome OS Subclasses must override this method. Site specific builds which
+    don't use Chrome OS versioning should leave this alone.
     """
-    raise NotImplementedError()
+    # Placeholder version for non-Chrome OS builds.
+    return manifest_version.VersionInfo('1.0.0')
 
   def GetSyncInstance(self):
     """Returns an instance of a SyncStage that should be run.
diff --git a/cbuildbot/builders/builders_unittest b/cbuildbot/builders/init_unittest
similarity index 100%
rename from cbuildbot/builders/builders_unittest
rename to cbuildbot/builders/init_unittest
diff --git a/cbuildbot/builders/builders_unittest.py b/cbuildbot/builders/init_unittest.py
similarity index 100%
rename from cbuildbot/builders/builders_unittest.py
rename to cbuildbot/builders/init_unittest.py
diff --git a/cbuildbot/builders/simple_builders.py b/cbuildbot/builders/simple_builders.py
index ce6ce19..4a5f02f 100644
--- a/cbuildbot/builders/simple_builders.py
+++ b/cbuildbot/builders/simple_builders.py
@@ -390,6 +390,8 @@
           not self._run.config.afdo_generate_min):
         self._RunStage(afdo_stages.AFDOUpdateEbuildStage)
     finally:
+      if self._run.config.master:
+        self._RunStage(report_stages.SlaveFailureSummaryStage)
       if self._run.config.push_overlays:
         publish = (was_build_successful and completion_successful and
                    build_finished)
diff --git a/cbuildbot/cbuildbot_run.py b/cbuildbot/cbuildbot_run.py
index ee2d67b..ef81864 100644
--- a/cbuildbot/cbuildbot_run.py
+++ b/cbuildbot/cbuildbot_run.py
@@ -169,7 +169,6 @@
       'instruction_urls_per_channel', # Set by ArchiveStage
       'success',                    # Set by cbuildbot.py:Builder
       'packages_under_test',        # Set by BuildPackagesStage.
-      'gce_tarball_generated',       # Set by ArchiveStage.
   ))
 
   # Attributes that need to be set by stages that can run in parallel
diff --git a/cbuildbot/chromeos_config.py b/cbuildbot/chromeos_config.py
index 77131d0..883510c 100644
--- a/cbuildbot/chromeos_config.py
+++ b/cbuildbot/chromeos_config.py
@@ -123,8 +123,6 @@
     async_kwargs = kwargs.copy()
     async_kwargs.update(async_dict)
     async_kwargs['priority'] = constants.HWTEST_POST_BUILD_PRIORITY
-    async_kwargs['retry'] = False
-    async_kwargs['max_retries'] = None
     async_kwargs['async'] = True
     async_kwargs['suite_min_duts'] = 1
 
@@ -359,12 +357,12 @@
     'veyron_mickey',
     'veyron_mighty',
     'veyron_minnie',
+    'veyron_minnie-cheets',
     'veyron_pinky',
     'veyron_rialto',
     'veyron_romy',
     'veyron_shark',
     'veyron_speedy',
-    'veyron_thea',
     'whirlwind',
 ])
 
@@ -386,10 +384,12 @@
     'butterfly',
     'candy',
     'celes',
+    'chell',
     'cid',
     'clapper',
     'cranky',
     'cyan',
+    'cyan-cheets',
     'enguarde',
     'expresso',
     'falco',
@@ -406,6 +406,8 @@
     'kunimitsu',
     'lakitu',
     'lakitu_mobbuild',
+    'lakitu_next',
+    'lars',
     'leon',
     'link',
     'lulu',
@@ -423,6 +425,7 @@
     'peppy',
     'quawks',
     'rambi',
+    'reks',
     'rikku',
     'samus',
     'slippy',
@@ -433,6 +436,7 @@
     'stumpy_moblab',
     'sumo',
     'swanky',
+    'terra',
     'tidus',
     'tricky',
     'ultima',
@@ -495,14 +499,18 @@
     'arkham',
     'gizmo',
     'kayle',
-    'lakitu',
-    'lakitu_mobbuild',
     'panther_embedded',
     'purin',
     'storm',
     'whirlwind',
 ])
 
+_lakitu_boards = frozenset([
+    'lakitu',
+    'lakitu_mobbuild',
+    'lakitu_next',
+])
+
 _moblab_boards = frozenset([
     'stumpy_moblab',
     'panther_moblab',
@@ -522,35 +530,25 @@
     'x32-generic',
 ])
 
-_noimagetest_boards = frozenset([
-    'lakitu',
-    'lakitu_mobbuild',
-])
+_noimagetest_boards = _lakitu_boards
 
-_nohwqual_boards = frozenset([
+_nohwqual_boards = _lakitu_boards | frozenset([
     'kayle',
-    'lakitu',
-    'lakitu_mobbuild',
 ])
 
 _norootfs_verification_boards = frozenset([
 ])
 
-_base_layout_boards = frozenset([
-    'lakitu',
-    'lakitu_mobbuild',
-])
+_base_layout_boards = _lakitu_boards
 
 _no_unittest_boards = frozenset((
 ))
 
-_upload_gce_images_boards = frozenset([
-    'lakitu',
-    'lakitu_mobbuild',
-])
+_upload_gce_images_boards = _lakitu_boards
 
-_no_vmtest_boards = _arm_boards | _brillo_boards
-
+_no_vmtest_boards = _arm_boards | _brillo_boards | frozenset((
+    'cyan-cheets',
+))
 
 # This is a list of configs that should be included on the main waterfall, but
 # aren't included by default (see IsDefaultMainWaterfall). This loosely
@@ -586,18 +584,24 @@
 
     constants.WATERFALL_INTERNAL: frozenset([
         # Experimental Paladins.
+        'lakitu_next-paladin',
         'panther_moblab-paladin',
         'stumpy_moblab-paladin',
+        'veyron_mighty-paladin',
+        'veyron_speedy-paladin',
 
         # Experimental Canaries (Group)
+        'kunimitsu-release-group',
         'storm-release-group',
         'strago-release-group',
-        'veyron-c-release-group',
+        'strago-b-release-group',
+        'veyron-d-release-group',
 
         # Experimental Canaries
         'bobcat-release',
         'daisy_winter-release',
         'kayle-release',
+        'lakitu_next-release',
         'nyan_freon-release',
         'panther_moblab-release',
         'rush_ryu-release',
@@ -607,12 +611,12 @@
         # Incremental Builders.
         'mario-incremental',
         'lakitu-incremental',
+        'lakitu_next-incremental',
 
         # Firmware Builders.
         'link-depthcharge-full-firmware',
 
         # Toolchain Builders.
-        'internal-toolchain-major',
         'internal-toolchain-minor',
     ]),
 
@@ -883,6 +887,15 @@
       dev_installer_prebuilts=False,
       # TODO(gauravsh): crbug.com/356414 Start running tests on Brillo configs.
       vm_tests=[],
+  )
+
+  lakitu = config_lib.BuildConfig(
+      sync_chrome=False,
+      chrome_sdk=False,
+      afdo_use=False,
+      dev_installer_prebuilts=False,
+      vm_tests=[],
+      vm_tests_override=None,
       hw_tests=[],
   )
 
@@ -1006,7 +1019,6 @@
   chrome_try = config_lib.BuildConfig(
       build_type=constants.CHROME_PFQ_TYPE,
       chrome_rev=constants.CHROME_REV_TOT,
-      use_lkgm=True,
       important=False,
       manifest_version=False,
   )
@@ -1036,7 +1048,6 @@
           'perf_v2', pool=constants.HWTEST_CHROME_PERF_POOL,
           timeout=90 * 60, critical=True, num=1)],
       use_chrome_lkgm=True,
-      use_lkgm=False,
       useflags=append_useflags(['-cros-debug']),
   )
 
@@ -1054,6 +1065,8 @@
         base.update(manifest=constants.OFFICIAL_MANIFEST)
       if board in _brillo_boards:
         base.update(brillo)
+      if board in _lakitu_boards:
+        base.update(lakitu)
       if board in _moblab_boards:
         base.update(moblab)
       if board in _minimal_profile_boards:
@@ -1235,16 +1248,6 @@
 
   _CreateConfigsForBoards(telemetry, _telemetry_boards, 'telemetry')
 
-  _toolchain_major = site_config.AddConfigWithoutTemplate(
-      'toolchain-major',
-      _cros_sdk,
-      latest_toolchain=True,
-      prebuilts=False,
-      trybot_list=False,
-      gcc_githash='svn-mirror/google/main',
-      description='Test next major toolchain revision',
-  )
-
   _toolchain_minor = site_config.AddConfigWithoutTemplate(
       'toolchain-minor',
       _cros_sdk,
@@ -1297,7 +1300,6 @@
       trybot_list=True,
   )
 
-
   site_config.AddConfig(
       tot_asan_info, 'amd64-generic-tot-asan-informational',
       boards=['amd64-generic'],
@@ -1372,7 +1374,6 @@
   _CreateConfigsForBoards(chrome_perf, _chrome_perf_boards, 'chrome-perf',
                           trybot_list=True)
 
-
   _CreateConfigsForBoards(chromium_info,
                           ['x86-generic', 'amd64-generic'],
                           'telem-chromium-pfq-informational',
@@ -1606,6 +1607,7 @@
       'nyan_freon',
       'falco',
       'gizmo',
+      'glados',
       'guado_moblab',
       'kayle',
       'lakitu',
@@ -1630,7 +1632,6 @@
       'stumpy',
       'tricky',
       'veyron_pinky',
-      'whirlwind',
       'wolf',
       'x86-alex',
       'x86-generic',
@@ -1667,11 +1668,19 @@
       'peach_pit',
       'peppy',
       'stumpy',
+      'veyron_mighty',
+      'veyron_speedy',
       'wolf',
       'x86-alex',
       'x86-zgb',
   ])
 
+
+  # Jetstream devices run unique hw tests
+  _paladin_jetstream_hwtest_boards = frozenset([
+      'whirlwind',
+  ])
+
   _paladin_moblab_hwtest_boards = frozenset([
       'guado_moblab',
   ])
@@ -1702,6 +1711,13 @@
                     blocking=True, num=1, timeout=120*60,
                     pool=constants.HWTEST_PALADIN_POOL)
             ])
+      if board in _paladin_jetstream_hwtest_boards:
+        customizations.update(
+            hw_tests=[
+                config_lib.HWTestConfig(
+                    constants.HWTEST_JETSTREAM_COMMIT_SUITE,
+                    pool=constants.HWTEST_PALADIN_POOL)
+            ])
       if board not in _paladin_important_boards:
         customizations.update(important=False)
       if board in _paladin_chroot_replace_boards:
@@ -1745,10 +1761,8 @@
           customizations,
           base_config)
 
-
   _CreatePaladinConfigs()
 
-
   site_config.AddConfig(
       internal_paladin, 'lumpy-incremental-paladin',
       boards=['lumpy'],
@@ -1758,8 +1772,8 @@
       unittests=False,
   )
 
-  ### Paladins (CQ builders) which do not run VM or Unit tests on the builder
-  ### itself.
+  # Paladins (CQ builders) which do not run VM or Unit tests on the builder
+  # itself.
   external_brillo_paladin = paladin.derive(brillo)
 
   site_config.AddConfig(
@@ -1816,15 +1830,15 @@
   ShardHWTestsBetweenBuilders('x86-zgb-paladin', 'x86-alex-paladin')
   ShardHWTestsBetweenBuilders('wolf-paladin', 'peppy-paladin')
   ShardHWTestsBetweenBuilders('daisy_skate-paladin', 'peach_pit-paladin')
+  ShardHWTestsBetweenBuilders('veyron_mighty-paladin', 'veyron_speedy-paladin')
   ShardHWTestsBetweenBuilders('lumpy-paladin', 'stumpy-paladin')
 
   # Add a pre-cq config for every board.
   _CreateConfigsForBoards(pre_cq, _all_boards, 'pre-cq')
-  # Override 'lakitu-pre-cq' - it's in _brillo_boards, but should run vmtests.
   site_config.AddConfig(
       pre_cq, 'lakitu-pre-cq',
       _base_configs['lakitu'],
-      vm_tests=[constants.SMOKE_SUITE_TEST_TYPE],
+      vm_tests=[constants.SMOKE_SUITE_TEST_TYPE, constants.GCE_VM_TEST_TYPE],
   )
 
   _CreateConfigsForBoards(no_vmtest_pre_cq, _all_boards, 'no-vmtest-pre-cq')
@@ -1913,15 +1927,13 @@
   site_config.AddConfig(
       internal_incremental, 'lakitu-incremental',
       _base_configs['lakitu'],
-      vm_tests=[constants.SMOKE_SUITE_TEST_TYPE],
+      vm_tests=[constants.SMOKE_SUITE_TEST_TYPE, constants.GCE_VM_TEST_TYPE],
   )
 
-  site_config.AddConfigWithoutTemplate(
-      'internal-toolchain-major',
-      _toolchain_major, internal, official,
-      boards=['x86-alex', 'stumpy', 'daisy', 'lakitu'],
-      build_tests=True,
-      description=_toolchain_major['description'] + ' (internal)',
+  site_config.AddConfig(
+      internal_incremental, 'lakitu_next-incremental',
+      _base_configs['lakitu_next'],
+      vm_tests=[constants.SMOKE_SUITE_TEST_TYPE, constants.GCE_VM_TEST_TYPE],
   )
 
   site_config.AddConfigWithoutTemplate(
@@ -1945,6 +1957,7 @@
       manifest=constants.OFFICIAL_MANIFEST,
       manifest_version=True,
       images=['base', 'recovery', 'test', 'factory_install'],
+      sign_types=['recovery'],
       push_image=True,
       upload_symbols=True,
       binhost_bucket='gs://chromeos-dev-installer',
@@ -2159,6 +2172,13 @@
       chrome_sdk=False,
   )
 
+  site_config.AddConfig(
+      _release, 'smaug-release',
+      _base_configs['smaug'],
+      images=['base', 'recovery', 'test'],
+      sign_types=['nv_lp0_firmware'],
+  )
+
   # Now generate generic release configs if we haven't created anything more
   # specific above already.
   def _AddReleaseConfigs():
@@ -2303,18 +2323,26 @@
   site_config.AddConfig(
       _release, 'lakitu-release',
       _base_configs['lakitu'],
-      vm_tests=[constants.SMOKE_SUITE_TEST_TYPE],
+      vm_tests=[constants.SMOKE_SUITE_TEST_TYPE, constants.GCE_VM_TEST_TYPE],
+      sign_types=['base'],
       important=True,
   )
 
   site_config.AddConfig(
       _release, 'lakitu_mobbuild-release',
       _base_configs['lakitu_mobbuild'],
-      vm_tests=[constants.SMOKE_SUITE_TEST_TYPE],
+      vm_tests=[constants.SMOKE_SUITE_TEST_TYPE, constants.GCE_VM_TEST_TYPE],
       signer_tests=False,
       important=True,
   )
 
+  site_config.AddConfig(
+      _release, 'lakitu_next-release',
+      _base_configs['lakitu_next'],
+      vm_tests=[constants.SMOKE_SUITE_TEST_TYPE, constants.GCE_VM_TEST_TYPE],
+      signer_tests=False,
+  )
+
   _wificell_pre_cq = site_config.AddTemplate(
       'wificell-pre-cq',
       pre_cq,
@@ -2480,7 +2508,6 @@
   _AddGroupConfig(
       'rambi-e', 'orco', (
           'heli',
-          'wizpig',
       ),
   )
 
@@ -2543,16 +2570,16 @@
   _AddGroupConfig(
       'veyron-c', 'veyron_brain', (
           'veyron_danger',
-          'veyron_thea',
-          'veyron_shark',
+          'veyron_mickey',
       ),
-      important=False,
   )
 
   _AddGroupConfig(
-      'veyron-d', 'veyron_mickey', (
+      'veyron-d', 'veyron_shark', (
           'veyron_romy',
+          'veyron_minnie-cheets',
       ),
+      important=False,
   )
 
   # jecht-based boards
@@ -2574,6 +2601,15 @@
       important=False,
   )
 
+  _AddGroupConfig(
+      'strago-b', 'reks', (
+          'cyan-cheets',
+          'wizpig',
+          'terra',
+      ),
+      important=False,
+  )
+
   # oak-based boards
   _AddGroupConfig(
       'oak', 'oak', (
@@ -2583,6 +2619,7 @@
   # glados-based boards
   _AddGroupConfig(
       'glados', 'glados', (
+          'chell',
       ),
   )
 
@@ -2598,7 +2635,9 @@
   # kunimitsu-based boards
   _AddGroupConfig(
       'kunimitsu', 'kunimitsu', (
+          'lars',
       ),
+      important=False,
   )
 
   # Factory and Firmware releases much inherit from these classes.
@@ -2617,6 +2656,7 @@
       description='Factory Builds',
       paygen=False,
       afdo_use=False,
+      sign_types=['factory'],
   )
 
   _firmware = config_lib.BuildConfig(
@@ -2638,6 +2678,7 @@
       trybot_list=False,
       paygen=False,
       image_test=False,
+      sign_types=['firmware'],
   )
 
   _firmware_release = site_config.AddTemplate(
@@ -2670,6 +2711,7 @@
       'beltino',
       'butterfly',
       'candy',
+      'chell',
       'clapper',
       'cyan',
       'daisy',
@@ -2682,6 +2724,7 @@
       'gnawty',
       'jecht',
       'kip',
+      'lars',
       'leon',
       'link',
       'lumpy',
@@ -2696,6 +2739,7 @@
       'peppy',
       'quawks',
       'rambi',
+      'reks',
       'rikku',
       'samus',
       'slippy',
@@ -2707,6 +2751,7 @@
       'stumpy',
       'sumo',
       'swanky',
+      'terra',
       'winky',
       'wolf',
       'x86-mario',
diff --git a/cbuildbot/commands.py b/cbuildbot/commands.py
index 84e6d96..5202944 100644
--- a/cbuildbot/commands.py
+++ b/cbuildbot/commands.py
@@ -62,6 +62,7 @@
 _SWARMING_EXPIRATION = 20 * 60
 _RUN_SUITE_PATH = '/usr/local/autotest/site_utils/run_suite.py'
 _ABORT_SUITE_PATH = '/usr/local/autotest/site_utils/abort_suite.py'
+_MAX_HWTEST_CMD_RETRY = 10
 
 
 # =========================== Command Helpers =================================
@@ -176,14 +177,16 @@
       try:
         if os.path.isdir(cwd):
           git.CleanAndDetachHead(cwd)
-          git.GarbageCollection(cwd)
+
+        if os.path.isdir(repo_git_store):
+          git.GarbageCollection(repo_git_store)
       except cros_build_lib.RunCommandError as e:
         result = e.result
         logging.PrintBuildbotStepWarnings()
         logging.warning('\n%s', result.error)
 
         # If there's no repository corruption, just delete the index.
-        corrupted = git.IsGitRepositoryCorrupted(cwd)
+        corrupted = git.IsGitRepositoryCorrupted(repo_git_store)
         lock.write_lock()
         logging.warning('Deleting %s because %s failed', cwd, result.cmd)
         osutils.RmDir(cwd, ignore_missing=True)
@@ -904,11 +907,13 @@
   try:
     cmd = [_RUN_SUITE_PATH]
     cmd += _GetRunSuiteArgs(build, suite, board, pool, num, file_bugs,
-                            wait_for_results, priority, timeout_mins, retry,
-                            max_retries, minimum_duts, suite_min_duts,
-                            offload_failures_only, subsystems)
+                            priority, timeout_mins, retry, max_retries,
+                            minimum_duts, suite_min_duts, offload_failures_only,
+                            subsystems)
     swarming_args = _CreateSwarmingArgs(build, suite, timeout_mins)
-    HWTestCreateAndWait(cmd, swarming_args, debug)
+    job_id = _HWTestCreate(cmd, swarming_args, debug)
+    if wait_for_results and job_id:
+      _HWTestWait(cmd, job_id, swarming_args)
   except cros_build_lib.RunCommandError as e:
     result = e.result
     if not result.task_summary_json:
@@ -960,8 +965,7 @@
 
 # pylint: disable=docstring-missing-args
 def _GetRunSuiteArgs(build, suite, board, pool=None, num=None,
-                     file_bugs=None, wait_for_results=None,
-                     priority=None, timeout_mins=None,
+                     file_bugs=None, priority=None, timeout_mins=None,
                      retry=None, max_retries=None, minimum_duts=0,
                      suite_min_duts=0, offload_failures_only=None,
                      subsystems=None):
@@ -990,9 +994,6 @@
   if file_bugs is not None:
     args += ['--file_bugs', str(file_bugs)]
 
-  if wait_for_results is not None:
-    args += ['--no_wait', str(not wait_for_results)]
-
   if priority is not None:
     args += ['--priority', priority]
 
@@ -1064,21 +1065,23 @@
   return swarming_args
 
 
-def HWTestCreateAndWait(cmd, swarming_args, debug=False):
-  """Start and wait on HWTest suite in the lab.
+def _HWTestCreate(cmd, swarming_args, debug=False):
+  """Start a suite in the HWTest lab, and return its id.
 
-  This method first run a command to create the suite.
-  And then run a second command to wait for the suite result.
-  Since we are using swarming client, which contiuously send
-  request to swarming server to poll task result, there is
-  no need to retry on any network related failures.
+  This method runs a command to create the suite. Since we are using
+  swarming client, which contiuously send request to swarming server
+  to poll task result, there is no need to retry on any network
+  related failures.
 
   Args:
     cmd: Proxied run_suite command.
     debug: If True, log command rather than running it.
     swarming_args: A dictionary of args to passed to RunSwarmingCommand.
+
+  Returns:
+    Job id of created suite. Returned id will be None if no job id was created.
   """
-  # Start the suite
+  # Start the suite.
   start_cmd = list(cmd) + ['-c']
 
   if debug:
@@ -1096,15 +1099,27 @@
     m = re.search(r'Created suite job:.*object_id=(?P<job_id>\d*)',
                   result.output)
     if m:
-      job_id = m.group('job_id')
-      # Wait on the suite
-      wait_cmd = list(cmd) + ['-m', str(job_id)]
-      result = swarming_lib.RunSwarmingCommand(
-          wait_cmd, capture_output=True, combine_stdout_stderr=True,
-          **swarming_args)
-      for output in result.task_summary_json['shards'][0]['outputs']:
-        sys.stdout.write(output)
-      sys.stdout.flush()
+      return m.group('job_id')
+  return None
+
+def _HWTestWait(cmd, job_id, swarming_args):
+  """Wait for HWTest suite to complete.
+
+  Args:
+    cmd: Proxied run_suite command.
+    job_id: The job id of the suite that was created.
+    swarming_args: A dictionary of args to pass to RunSwarmingCommand.
+  """
+  # Wait on the suite
+  wait_cmd = list(cmd) + ['-m', str(job_id)]
+  result = swarming_lib.RunSwarmingCommandWithRetries(
+      max_retry=_MAX_HWTEST_CMD_RETRY,
+      error_check=swarming_lib.SwarmingRetriableErrorCheck,
+      cmd=wait_cmd, capture_output=True, combine_stdout_stderr=True,
+      **swarming_args)
+  for output in result.task_summary_json['shards'][0]['outputs']:
+    sys.stdout.write(output)
+  sys.stdout.flush()
 
 
 def AbortHWTests(config_type_or_name, version, debug, suite=''):
@@ -1562,21 +1577,33 @@
     _UploadPathToGS(uploaded_file_path, upload_urls, debug, timeout)
 
 
-def UploadSymbols(buildroot, board, official, cnt, failed_list):
+def UploadSymbols(buildroot, board=None, official=False, cnt=None,
+                  failed_list=None, breakpad_root=None, product_name=None,
+                  error_code_ok=True):
   """Upload debug symbols for this build."""
-  cmd = ['upload_symbols', '--yes', '--board', board,
-         '--root', os.path.join(buildroot, constants.DEFAULT_CHROOT_DIR)]
-  if failed_list is not None:
-    cmd += ['--failed-list', str(failed_list)]
+  cmd = ['upload_symbols', '--yes']
+
+  if board is not None:
+    # Board requires both root and board to be set to be useful.
+    cmd += [
+        '--root', os.path.join(buildroot, constants.DEFAULT_CHROOT_DIR),
+        '--board', board]
   if official:
     cmd.append('--official_build')
   if cnt is not None:
     cmd += ['--upload-limit', str(cnt)]
+  if failed_list is not None:
+    cmd += ['--failed-list', str(failed_list)]
+  if breakpad_root is not None:
+    cmd += ['--breakpad_root', breakpad_root]
+  if product_name is not None:
+    cmd += ['--product_name', product_name]
 
   # We don't want to import upload_symbols directly because it uses the
   # swarming module which itself imports a _lot_ of stuff.  It has also
   # been known to hang.  We want to keep cbuildbot isolated & robust.
-  ret = RunBuildScript(buildroot, cmd, chromite_cmd=True, error_code_ok=True)
+  ret = RunBuildScript(buildroot, cmd, chromite_cmd=True,
+                       error_code_ok=error_code_ok)
   if ret.returncode:
     # TODO(davidjames): Convert this to a fatal error.
     # See http://crbug.com/212437
@@ -1959,7 +1986,15 @@
     A KeyError is a required field is missing from artifact_info.
   """
   if 'archive' not in artifact_info:
-    # Nothing to do, just return the list as-is.
+    # Copy the file in 'paths' as is to the archive directory.
+    if len(artifact_info['paths']) > 1:
+      raise ValueError('default archive type does not support multiple inputs')
+    src_image = os.path.join(image_dir, artifact_info['paths'][0])
+    tgt_image = os.path.join(archive_dir, artifact_info['paths'][0])
+    if not os.path.exists(tgt_image):
+      # The image may have already been copied into place. If so, overwriting it
+      # can affect parallel processes.
+      shutil.copy(src_image, tgt_image)
     return artifact_info['paths']
 
   inputs = artifact_info['paths']
diff --git a/cbuildbot/commands_unittest.py b/cbuildbot/commands_unittest.py
index da89b7b..2aeb7a8 100644
--- a/cbuildbot/commands_unittest.py
+++ b/cbuildbot/commands_unittest.py
@@ -211,6 +211,11 @@
 02-23-2015 [06:26:53] Created suite job: http://cautotest.corp.google.com/afe/#tab_id=view_job&object_id=26960110
 @@@STEP_LINK@Suite created@http://cautotest.corp.google.com/afe/#tab_id=view_job&object_id=26960110@@@
 '''
+
+  WAIT_RETRY_OUTPUT = '''
+ERROR: Encountered swarming internal error
+'''
+
   WAIT_OUTPUT = '''
 The suite job has another 3:09:50.012887 till timeout.
 The suite job has another 2:39:39.789250 till timeout.
@@ -228,7 +233,7 @@
     self._pool = 'test-pool'
     self._num = 42
     self._file_bugs = True
-    self._wait_for_results = False
+    self._wait_for_results = True
     self._priority = 'test-priority'
     self._timeout_mins = 23
     self._retry = False
@@ -238,6 +243,11 @@
     self.create_cmd = None
     self.wait_cmd = None
     self.temp_json_path = os.path.join(self.tempdir, 'temp_summary.json')
+    # Bot died
+    self.retriable_swarming_code = 80
+    self.internal_failure_exit_code = 1
+    # A random code that's not retriable.
+    self.swarming_code = 10
     topology.FetchTopologyFromCIDB(None)
 
   def RunHWTestSuite(self, *args, **kwargs):
@@ -250,7 +260,8 @@
       finally:
         print(logs.messages)
 
-  def SetCmdResults(self, create_return_code=0, wait_return_code=0, args=(),
+  def SetCmdResults(self, create_return_code=0, wait_return_code=0,
+                    wait_retry=False, args=(),
                     swarming_timeout_secs=SWARMING_TIMEOUT_DEFAULT,
                     swarming_io_timeout_secs=SWARMING_TIMEOUT_DEFAULT,
                     swarming_hard_timeout_secs=SWARMING_TIMEOUT_DEFAULT,
@@ -260,6 +271,7 @@
     Args:
       create_return_code: Return code from create command.
       wait_return_code: Return code from wait command.
+      wait_retry: Boolean, whether the wait command should be retried.
       args: Additional args to pass to create and wait commands.
       swarming_timeout_secs: swarming client timeout.
       swarming_io_timeout_secs: swarming client io timeout.
@@ -292,12 +304,21 @@
         self.create_cmd,
         side_effect=lambda *args, **kwargs: create_results.next(),
     )
+    wait_results_list = []
+    if wait_retry:
+      r = self.rc.CmdResult(
+          returncode=self.internal_failure_exit_code,
+          output=self.WAIT_RETRY_OUTPUT,
+          error='')
+      wait_results_list.append(r)
 
-    wait_results = iter([
-        self.rc.CmdResult(returncode=wait_return_code,
-                          output=self.WAIT_OUTPUT,
-                          error=''),
-    ])
+    wait_results_list.append(
+        self.rc.CmdResult(
+            returncode=wait_return_code, output=self.WAIT_OUTPUT,
+            error='')
+    )
+    wait_results = iter(wait_results_list)
+
     self.rc.AddCmdResult(
         self.wait_cmd,
         side_effect=lambda *args, **kwargs: wait_results.next(),
@@ -309,8 +330,11 @@
     Args:
       task_outputs: A list of tuple, the first element is the value of 'outputs'
                     field in the json dictionary, the second is a boolean
-                    indicating whether there is an internal failure.
-                    ('some output', True)
+                    indicating whether there is an internal failure,
+                    the third is a state code for the internal failure.
+                    e.g.
+                    ('some output', True, 80)
+                    ('some output', False, None)
     """
     orig_func = commands._CreateSwarmingArgs
 
@@ -327,6 +351,7 @@
         j = {'shards':[{'name': 'fake_name', 'bot_id': 'chromeos-server990',
                         'created_ts': '2015-06-12 12:00:00',
                         'internal_failure': s[1],
+                        'state': s[2],
                         'outputs': [s[0]]}]}
         return_values.append(j)
       return_values_iter = iter(return_values)
@@ -339,16 +364,15 @@
   def testRunHWTestSuiteMinimal(self):
     """Test RunHWTestSuite without optional arguments."""
     self.SetCmdResults()
-    self.PatchJson([(self.JOB_ID_OUTPUT, False), (self.WAIT_OUTPUT, False)])
+    # When run without optional arguments, wait_for_results defaults to None,
+    # so the wait cmd will not run.
+    self.PatchJson([(self.JOB_ID_OUTPUT, False, None)])
 
     with self.OutputCapturer() as output:
       self.RunHWTestSuite()
     self.assertCommandCalled(self.create_cmd, capture_output=True,
                              combine_stdout_stderr=True)
-    self.assertCommandCalled(self.wait_cmd, capture_output=True,
-                             combine_stdout_stderr=True)
     self.assertIn(self.JOB_ID_OUTPUT, '\n'.join(output.GetStdoutLines()))
-    self.assertIn(self.WAIT_OUTPUT, '\n'.join(output.GetStdoutLines()))
 
   def testRunHWTestSuiteMaximal(self):
     """Test RunHWTestSuite with all arguments."""
@@ -357,7 +381,7 @@
     self.SetCmdResults(
         args=[
             '--pool', 'test-pool', '--num', '42',
-            '--file_bugs', 'True', '--no_wait', 'True',
+            '--file_bugs', 'True',
             '--priority', 'test-priority', '--timeout_mins', '23',
             '--retry', 'False', '--max_retries', '3', '--minimum_duts', '2',
             '--suite_min_duts', '2'
@@ -366,7 +390,8 @@
         swarming_io_timeout_secs=swarming_timeout,
         swarming_hard_timeout_secs=swarming_timeout)
 
-    self.PatchJson([(self.JOB_ID_OUTPUT, False), (self.WAIT_OUTPUT, False)])
+    self.PatchJson([(self.JOB_ID_OUTPUT, False, None),
+                    (self.WAIT_OUTPUT, False, None)])
     with self.OutputCapturer() as output:
       self.RunHWTestSuite(self._pool, self._num, self._file_bugs,
                           self._wait_for_results, self._priority,
@@ -382,35 +407,35 @@
 
   def testRunHWTestSuiteFailure(self):
     """Test RunHWTestSuite when ERROR is returned."""
-    self.PatchJson([(self.JOB_ID_OUTPUT, False)])
+    self.PatchJson([(self.JOB_ID_OUTPUT, False, None)])
     self.rc.SetDefaultCmdResult(returncode=1, output=self.JOB_ID_OUTPUT)
     with self.OutputCapturer():
       self.assertRaises(failures_lib.TestFailure, self.RunHWTestSuite)
 
   def testRunHWTestSuiteTimedOut(self):
     """Test RunHWTestSuite when SUITE_TIMEOUT is returned."""
-    self.PatchJson([(self.JOB_ID_OUTPUT, False)])
+    self.PatchJson([(self.JOB_ID_OUTPUT, False, None)])
     self.rc.SetDefaultCmdResult(returncode=4, output=self.JOB_ID_OUTPUT)
     with self.OutputCapturer():
       self.assertRaises(failures_lib.SuiteTimedOut, self.RunHWTestSuite)
 
   def testRunHWTestSuiteInfraFail(self):
     """Test RunHWTestSuite when INFRA_FAILURE is returned."""
-    self.PatchJson([(self.JOB_ID_OUTPUT, False)])
+    self.PatchJson([(self.JOB_ID_OUTPUT, False, None)])
     self.rc.SetDefaultCmdResult(returncode=3, output=self.JOB_ID_OUTPUT)
     with self.OutputCapturer():
       self.assertRaises(failures_lib.TestLabFailure, self.RunHWTestSuite)
 
   def testRunHWTestBoardNotAvailable(self):
     """Test RunHWTestSuite when BOARD_NOT_AVAILABLE is returned."""
-    self.PatchJson([(self.JOB_ID_OUTPUT, False)])
+    self.PatchJson([(self.JOB_ID_OUTPUT, False, None)])
     self.rc.SetDefaultCmdResult(returncode=5, output=self.JOB_ID_OUTPUT)
     with self.OutputCapturer():
       self.assertRaises(failures_lib.BoardNotAvailable, self.RunHWTestSuite)
 
   def testRunHWTestTestWarning(self):
     """Test RunHWTestSuite when WARNING is returned."""
-    self.PatchJson([(self.JOB_ID_OUTPUT, False)])
+    self.PatchJson([(self.JOB_ID_OUTPUT, False, None)])
     self.rc.SetDefaultCmdResult(returncode=2, output=self.JOB_ID_OUTPUT)
     with self.OutputCapturer():
       self.assertRaises(failures_lib.TestWarning, self.RunHWTestSuite)
@@ -425,15 +450,35 @@
       self.assertIn(unknown_failure, '\n'.join(output.GetStdoutLines()))
 
   def testRunHWTestTestSwarmingClientInternalFailure(self):
-    """Test RunHWTestSuite when no summary file is generated."""
+    """Test RunHWTestSuite when swarming encounters internal failure."""
     unknown_failure = 'Unknown failure'
-    self.PatchJson(task_outputs=[(self.JOB_ID_OUTPUT, True)])
+    self.PatchJson(
+        task_outputs=[(self.JOB_ID_OUTPUT, True, self.swarming_code)])
     self.rc.SetDefaultCmdResult(returncode=1, output=unknown_failure)
     with self.OutputCapturer() as output:
       self.assertRaises(failures_lib.SwarmingProxyFailure, self.RunHWTestSuite)
       self.assertIn(unknown_failure, '\n'.join(output.GetStdoutLines()))
       self.assertIn('summary json content', '\n'.join(output.GetStdoutLines()))
 
+  def testRunHWTestTestSwarmingClientWithRetires(self):
+    """Test RunHWTestSuite with retries."""
+    self.SetCmdResults(wait_retry=True)
+    self.PatchJson(
+        [(self.JOB_ID_OUTPUT, False, None),
+         (self.WAIT_RETRY_OUTPUT, True, self.retriable_swarming_code),
+         (self.WAIT_OUTPUT, False, None),
+        ])
+    with self.OutputCapturer() as output:
+      self.RunHWTestSuite(wait_for_results=self._wait_for_results)
+      self.assertCommandCalled(self.create_cmd, capture_output=True,
+                               combine_stdout_stderr=True)
+      self.assertCommandCalled(self.wait_cmd, capture_output=True,
+                               combine_stdout_stderr=True)
+      self.assertIn(self.WAIT_RETRY_OUTPUT.strip(),
+                    '\n'.join(output.GetStdoutLines()))
+      self.assertIn(self.WAIT_OUTPUT, '\n'.join(output.GetStdoutLines()))
+      self.assertIn(self.JOB_ID_OUTPUT, '\n'.join(output.GetStdoutLines()))
+
   def testGetRunSuiteArgsWithSubsystems(self):
     """Test _GetRunSuiteArgs when subsystems is specified."""
     result_1 = commands._GetRunSuiteArgs(build=self._build,
@@ -570,27 +615,44 @@
     commands.GenerateBreakpadSymbols(self.tempdir, self._board, False)
     self.assertCommandContains(['--board=%s' % self._board])
 
-  def testUploadSymbols(self, official=False, cnt=None):
-    """Test UploadSymbols Command."""
-    commands.UploadSymbols(self.tempdir, self._board, official, cnt, None)
-    self.assertCommandContains(['--board', self._board])
-    self.assertCommandContains(['--official_build'], expected=official)
-    self.assertCommandContains(['--upload-limit'], expected=cnt is not None)
-    self.assertCommandContains(['--failed-list'], expected=False)
-
-  def testOfficialUploadSymbols(self):
+  def testUploadSymbolsMinimal(self):
     """Test uploading symbols for official builds"""
-    self.testUploadSymbols(official=True)
+    commands.UploadSymbols('/buildroot', 'MyBoard')
+    self.assertCommandContains(
+        ['/buildroot/chromite/bin/upload_symbols', '--yes',
+         '--root', '/buildroot/chroot',
+         '--board', 'MyBoard'])
 
-  def testLimitUploadSymbols(self):
-    """Test uploading a limited number of symbols"""
-    self.testUploadSymbols(cnt=10)
+  def testUploadSymbolsMinimalNoneChromeOS(self):
+    """Test uploading symbols for official builds"""
+    commands.UploadSymbols(
+        '/buildroot', breakpad_root='/breakpad', product_name='CoolProduct')
+    self.assertCommandContains(
+        ['/buildroot/chromite/bin/upload_symbols', '--yes',
+         '--breakpad_root', '/breakpad',
+         '--product_name', 'CoolProduct'])
+
+  def testUploadSymbolsMaximal(self):
+    """Test uploading symbols for official builds"""
+    commands.UploadSymbols(
+        '/buildroot', 'MyBoard', official=True, cnt=55,
+        failed_list='/failed_list.txt', breakpad_root='/breakpad',
+        product_name='CoolProduct')
+    self.assertCommandContains(
+        ['/buildroot/chromite/bin/upload_symbols', '--yes',
+         '--root', '/buildroot/chroot',
+         '--board', 'MyBoard',
+         '--official_build',
+         '--upload-limit', '55',
+         '--failed-list', '/failed_list.txt',
+         '--breakpad_root', '/breakpad',
+         '--product_name', 'CoolProduct'])
 
   def testFailedUploadSymbols(self):
     """Test when uploading fails"""
     self.rc.SetDefaultCmdResult(returncode=1, error='i am sad')
     # This should not throw an exception.
-    commands.UploadSymbols(self.tempdir, self._board, None, None, None)
+    commands.UploadSymbols(self.tempdir)
 
   def testPushImages(self):
     """Test PushImages Command."""
diff --git a/cbuildbot/config_dump.json b/cbuildbot/config_dump.json
index 83d8cb7..fac7927 100644
--- a/cbuildbot/config_dump.json
+++ b/cbuildbot/config_dump.json
@@ -84,6 +84,7 @@
         "sanity_check_slaves": null,
         "separate_debug_symbols": true,
         "shared_user_password": null,
+        "sign_types": null,
         "signer_tests": false,
         "sync_chrome": null,
         "trybot_list": false,
@@ -192,8 +193,8 @@
             "hw_tests_override": [
                 "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 3,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
                 "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"au\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
-                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
-                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": true,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": true,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
             ],
             "profile": "asan",
             "vm_tests": [
@@ -214,8 +215,8 @@
             "hw_tests_override": [
                 "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 3,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
                 "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"au\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
-                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
-                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": true,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": true,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
             ],
             "important": false,
             "internal": true,
@@ -225,7 +226,6 @@
             "unittests": false,
             "uprev": false,
             "use_chrome_lkgm": true,
-            "use_lkgm": false,
             "useflags": [
                 "-cros-debug",
                 "chrome_internal"
@@ -244,8 +244,8 @@
             "hw_tests_override": [
                 "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 3,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
                 "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"au\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
-                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
-                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": true,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": true,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
             ],
             "important": true,
             "internal": true,
@@ -273,8 +273,8 @@
             "hw_tests_override": [
                 "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 3,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
                 "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"au\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
-                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
-                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": true,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": true,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
             ],
             "important": false,
             "internal": true,
@@ -282,7 +282,6 @@
             "manifest_version": false,
             "overlays": "both",
             "uprev": false,
-            "use_lkgm": true,
             "useflags": [
                 "chrome_internal"
             ],
@@ -301,8 +300,8 @@
             "hw_tests_override": [
                 "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 3,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
                 "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"au\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
-                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
-                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": true,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": true,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
             ],
             "important": true,
             "manifest_version": true,
@@ -323,14 +322,13 @@
             "hw_tests_override": [
                 "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 3,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
                 "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"au\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
-                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
-                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": true,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": true,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
             ],
             "important": false,
             "manifest_version": false,
             "overlays": "public",
             "uprev": false,
-            "use_lkgm": true,
             "vm_tests": [
                 "smoke_suite",
                 "pfq_suite"
@@ -357,8 +355,8 @@
             "hw_tests_override": [
                 "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 3,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
                 "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"au\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
-                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
-                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": true,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": true,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
             ],
             "image_test": true,
             "images": [
@@ -396,8 +394,8 @@
             "hw_tests_override": [
                 "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 3,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
                 "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"au\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
-                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
-                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": true,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": true,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
             ],
             "hwqual": true,
             "image_test": false,
@@ -413,6 +411,9 @@
             ],
             "paygen": false,
             "push_image": true,
+            "sign_types": [
+                "firmware"
+            ],
             "signer_tests": false,
             "sync_chrome": false,
             "trybot_list": false,
@@ -442,8 +443,8 @@
             "hw_tests_override": [
                 "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 3,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
                 "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"au\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
-                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
-                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": true,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": true,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
             ],
             "image_test": false,
             "images": [],
@@ -455,6 +456,9 @@
                 "chromeos-base/autotest-all"
             ],
             "paygen": false,
+            "sign_types": [
+                "firmware"
+            ],
             "signer_tests": false,
             "sync_chrome": false,
             "trybot_list": false,
@@ -488,8 +492,8 @@
             "hw_tests_override": [
                 "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 3,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
                 "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"au\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
-                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
-                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": true,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": true,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
             ],
             "hwqual": true,
             "image_test": true,
@@ -506,6 +510,9 @@
             "overlays": "both",
             "paygen": false,
             "push_image": true,
+            "sign_types": [
+                "factory"
+            ],
             "signer_tests": true,
             "trybot_list": true,
             "upload_hw_test_artifacts": false,
@@ -541,8 +548,8 @@
             "hw_tests_override": [
                 "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 3,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
                 "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"au\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
-                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
-                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": true,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": true,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
             ],
             "hwqual": true,
             "image_test": false,
@@ -558,6 +565,9 @@
             ],
             "paygen": false,
             "push_image": true,
+            "sign_types": [
+                "firmware"
+            ],
             "signer_tests": false,
             "sync_chrome": false,
             "trybot_list": false,
@@ -582,8 +592,8 @@
             "hw_tests_override": [
                 "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 3,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
                 "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"au\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
-                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
-                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": true,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": true,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
             ],
             "image_test": true,
             "images": [
@@ -604,8 +614,8 @@
             "hw_tests_override": [
                 "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 3,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
                 "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"au\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
-                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
-                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": true,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": true,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
             ],
             "overlays": "public",
             "uprev": false
@@ -616,8 +626,8 @@
             "hw_tests_override": [
                 "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 3,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
                 "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"au\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
-                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
-                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": true,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": true,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
             ],
             "profile": "llvm"
         },
@@ -644,8 +654,8 @@
             "hw_tests_override": [
                 "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 3,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
                 "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"au\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
-                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
-                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": true,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": true,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
             ],
             "hwqual": true,
             "image_test": true,
@@ -664,6 +674,9 @@
             "paygen_skip_delta_payloads": true,
             "paygen_skip_testing": true,
             "push_image": true,
+            "sign_types": [
+                "recovery"
+            ],
             "signer_tests": false,
             "trybot_list": true,
             "upload_symbols": true,
@@ -697,8 +710,8 @@
             "hw_tests_override": [
                 "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 3,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
                 "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"au\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
-                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
-                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": true,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": true,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
             ],
             "image_test": true,
             "images": [
@@ -726,8 +739,8 @@
             "hw_tests_override": [
                 "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 3,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
                 "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"au\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
-                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
-                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": true,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": true,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
             ],
             "image_test": true,
             "images": [
@@ -785,8 +798,8 @@
             "hw_tests_override": [
                 "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 3,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
                 "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"au\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
-                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
-                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": true,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": true,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
             ],
             "image_test": true,
             "images": [
@@ -852,14 +865,14 @@
                 "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": true,\n    \"max_retries\": 10,\n    \"minimum_duts\": 1,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"bvt\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"sanity\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
                 "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": true,\n    \"max_retries\": 10,\n    \"minimum_duts\": 4,\n    \"num\": 6,\n    \"offload_failures_only\": false,\n    \"pool\": \"bvt\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 6,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
                 "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": true,\n    \"max_retries\": 10,\n    \"minimum_duts\": 4,\n    \"num\": 2,\n    \"offload_failures_only\": false,\n    \"pool\": \"bvt\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"au\",\n    \"suite_min_duts\": 6,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
-                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": true,\n    \"max_retries\": null,\n    \"minimum_duts\": 4,\n    \"num\": 2,\n    \"offload_failures_only\": false,\n    \"pool\": \"bvt\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
-                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": true,\n    \"max_retries\": null,\n    \"minimum_duts\": 4,\n    \"num\": 2,\n    \"offload_failures_only\": false,\n    \"pool\": \"bvt\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": true,\n    \"max_retries\": 10,\n    \"minimum_duts\": 4,\n    \"num\": 2,\n    \"offload_failures_only\": false,\n    \"pool\": \"bvt\",\n    \"priority\": \"PostBuild\",\n    \"retry\": true,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": true,\n    \"max_retries\": 10,\n    \"minimum_duts\": 4,\n    \"num\": 2,\n    \"offload_failures_only\": false,\n    \"pool\": \"bvt\",\n    \"priority\": \"PostBuild\",\n    \"retry\": true,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
             ],
             "hw_tests_override": [
                 "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 3,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
                 "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"au\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
-                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
-                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": true,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": true,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
             ],
             "hwqual": true,
             "image_test": true,
@@ -876,6 +889,9 @@
             "overlays": "both",
             "paygen": true,
             "push_image": true,
+            "sign_types": [
+                "recovery"
+            ],
             "signer_tests": true,
             "trybot_list": true,
             "upload_symbols": true,
@@ -928,6 +944,9 @@
             "overlays": "both",
             "paygen": false,
             "push_image": false,
+            "sign_types": [
+                "recovery"
+            ],
             "signer_tests": true,
             "trybot_list": false,
             "upload_symbols": true,
@@ -960,15 +979,15 @@
             "hw_tests": [
                 "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 4,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
                 "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"au\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
-                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
-                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": true,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": true,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
                 "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": false,\n    \"suite\": \"perf_v2\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 7200,\n    \"warn_only\": false\n}"
             ],
             "hw_tests_override": [
                 "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 3,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
                 "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"au\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
-                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
-                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": true,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": true,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
             ],
             "hwqual": true,
             "image_test": true,
@@ -985,6 +1004,9 @@
             "overlays": "both",
             "paygen": false,
             "push_image": false,
+            "sign_types": [
+                "recovery"
+            ],
             "signer_tests": true,
             "trybot_list": false,
             "upload_symbols": true,
@@ -1006,8 +1028,8 @@
             "hw_tests_override": [
                 "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 3,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
                 "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"au\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
-                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
-                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": true,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": true,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
             ],
             "overlays": "public",
             "uprev": false,
@@ -1021,8 +1043,8 @@
             "hw_tests_override": [
                 "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 3,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
                 "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"au\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
-                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
-                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": true,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": true,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
             ],
             "internal": true,
             "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
@@ -1041,15 +1063,14 @@
             "hw_tests_override": [
                 "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 3,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
                 "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"au\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
-                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
-                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": true,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": true,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
             ],
             "important": false,
             "manifest_version": false,
             "overlays": "public",
             "profile": "asan",
             "uprev": false,
-            "use_lkgm": true,
             "vm_tests": [
                 "smoke_suite"
             ],
@@ -1428,7 +1449,6 @@
         ],
         "chrome_sdk": false,
         "dev_installer_prebuilts": false,
-        "hw_tests": [],
         "sync_chrome": false,
         "vm_tests": []
     },
@@ -2853,7 +2873,6 @@
         ],
         "chrome_sdk": false,
         "dev_installer_prebuilts": false,
-        "hw_tests": [],
         "image_test": false,
         "images": [
             "base",
@@ -2923,7 +2942,6 @@
                 "chrome_sdk": false,
                 "dev_installer_prebuilts": false,
                 "grouped": true,
-                "hw_tests": [],
                 "image_test": false,
                 "images": [
                     "base",
@@ -2947,7 +2965,6 @@
                 "chrome_sdk_build_chrome": false,
                 "dev_installer_prebuilts": false,
                 "grouped": true,
-                "hw_tests": [],
                 "image_test": false,
                 "images": [
                     "base",
@@ -2965,7 +2982,6 @@
         ],
         "chrome_sdk": false,
         "dev_installer_prebuilts": false,
-        "hw_tests": [],
         "image_test": false,
         "images": [
             "base",
@@ -3102,7 +3118,6 @@
         ],
         "chrome_sdk": false,
         "dev_installer_prebuilts": false,
-        "hw_tests": [],
         "image_test": false,
         "images": [
             "base",
@@ -4384,6 +4399,171 @@
             "-chrome_internal"
         ]
     },
+    "chell-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "chell"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "chell-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "chell"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "chell-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "chell"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "chell-firmware": {
+        "_template": "firmware",
+        "boards": [
+            "chell"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty"
+        ]
+    },
+    "chell-full": {
+        "_template": "full",
+        "boards": [
+            "chell"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "chell-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "chell"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "chell-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "chell"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "chell-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "chell"
+        ]
+    },
+    "chell-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "chell"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "chell-release": {
+        "_template": "release",
+        "boards": [
+            "chell"
+        ]
+    },
+    "chell-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "chell"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "chell"
+                ],
+                "grouped": true,
+                "name": "chell-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "chell"
+                ],
+                "grouped": true,
+                "name": "chell-release-afdo-use"
+            }
+        ]
+    },
+    "chell-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "chell"
+        ]
+    },
+    "chell-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "chell"
+        ]
+    },
+    "chell-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "chell"
+        ],
+        "manifest": "official.xml"
+    },
+    "chell-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "chell"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
     "chromiumos-sdk": {
         "active_waterfall": "chromiumos",
         "archive_build_debug": true,
@@ -4908,6 +5088,173 @@
             "-chrome_internal"
         ]
     },
+    "cyan-cheets-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "cyan-cheets"
+        ],
+        "important": false,
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "cyan-cheets-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "cyan-cheets"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "cyan-cheets-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "cyan-cheets"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "cyan-cheets-full": {
+        "_template": "full",
+        "boards": [
+            "cyan-cheets"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "cyan-cheets-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "cyan-cheets"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "cyan-cheets-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "cyan-cheets"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": [],
+        "vm_tests_override": null
+    },
+    "cyan-cheets-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "cyan-cheets"
+        ]
+    },
+    "cyan-cheets-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "cyan-cheets"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "cyan-cheets-release": {
+        "_template": "release",
+        "boards": [
+            "cyan-cheets"
+        ],
+        "vm_tests": []
+    },
+    "cyan-cheets-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "cyan-cheets"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "cyan-cheets"
+                ],
+                "grouped": true,
+                "name": "cyan-cheets-release-afdo-generate",
+                "vm_tests": []
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "cyan-cheets"
+                ],
+                "grouped": true,
+                "name": "cyan-cheets-release-afdo-use",
+                "vm_tests": []
+            }
+        ],
+        "vm_tests": []
+    },
+    "cyan-cheets-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "cyan-cheets"
+        ],
+        "vm_tests": []
+    },
+    "cyan-cheets-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "cyan-cheets"
+        ],
+        "vm_tests": []
+    },
+    "cyan-cheets-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "cyan-cheets"
+        ],
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "cyan-cheets-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "cyan-cheets"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
     "cyan-chrome-pfq": {
         "_template": "chrome-pfq",
         "boards": [
@@ -6981,7 +7328,6 @@
         ],
         "chrome_sdk": false,
         "dev_installer_prebuilts": false,
-        "hw_tests": [],
         "important": true,
         "paygen": false,
         "signer_tests": false,
@@ -7087,9 +7433,25 @@
                 "useflags": [
                     "-chrome_internal"
                 ]
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "chell"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "chell-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
             }
         ],
-        "description": "Full Builds; Group config (boards: glados)",
+        "description": "Full Builds; Group config (boards: glados, chell)",
         "prebuilts": "public",
         "useflags": [
             "-chrome_internal"
@@ -7110,11 +7472,11 @@
     },
     "glados-paladin": {
         "_template": "paladin",
+        "active_waterfall": "chromeos",
         "boards": [
             "glados"
         ],
         "description": "Commit Queue (internal)",
-        "important": false,
         "internal": true,
         "manifest": "official.xml",
         "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
@@ -7200,9 +7562,21 @@
                 ],
                 "grouped": true,
                 "name": "glados-release"
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "chell"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "chell-release",
+                "unittests": null,
+                "vm_tests": []
             }
         ],
-        "description": "Release Builds (canary) (internal); Group config (boards: glados)",
+        "description": "Release Builds (canary) (internal); Group config (boards: glados, chell)",
         "important": true
     },
     "glados-tot-chrome-pfq-informational": {
@@ -8112,41 +8486,6 @@
             "-chrome_internal"
         ]
     },
-    "internal-toolchain-major": {
-        "active_waterfall": "chromeos",
-        "archive_build_debug": true,
-        "boards": [
-            "x86-alex",
-            "stumpy",
-            "daisy",
-            "lakitu"
-        ],
-        "build_type": "chroot",
-        "builder_class_name": "sdk_builders.ChrootSdkBuilder",
-        "chrome_sdk": true,
-        "chromeos_official": true,
-        "description": "Test next major toolchain revision (internal)",
-        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Continuous",
-        "gcc_githash": "svn-mirror/google/main",
-        "git_sync": true,
-        "hw_tests_override": [],
-        "image_test": true,
-        "images": [
-            "base",
-            "recovery",
-            "test",
-            "factory_install"
-        ],
-        "internal": true,
-        "latest_toolchain": true,
-        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
-        "overlays": "both",
-        "use_sdk": false,
-        "useflags": [
-            "chrome_internal"
-        ],
-        "usepkg_build_packages": false
-    },
     "internal-toolchain-minor": {
         "active_waterfall": "chromeos",
         "archive_build_debug": true,
@@ -8761,7 +9100,6 @@
         "dev_manifest": "kayle.xml",
         "factory_install_netboot": false,
         "factory_toolkit": false,
-        "hw_tests": [],
         "hwqual": false,
         "images": [
             "base"
@@ -9182,9 +9520,25 @@
                 "useflags": [
                     "-chrome_internal"
                 ]
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "lars"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "lars-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
             }
         ],
-        "description": "Full Builds; Group config (boards: kunimitsu)",
+        "description": "Full Builds; Group config (boards: kunimitsu, lars)",
         "prebuilts": "public",
         "useflags": [
             "-chrome_internal"
@@ -9295,10 +9649,21 @@
                 ],
                 "grouped": true,
                 "name": "kunimitsu-release"
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "lars"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "lars-release",
+                "unittests": null,
+                "vm_tests": []
             }
         ],
-        "description": "Release Builds (canary) (internal); Group config (boards: kunimitsu)",
-        "important": true
+        "description": "Release Builds (canary) (internal); Group config (boards: kunimitsu, lars)"
     },
     "kunimitsu-tot-chrome-pfq-informational": {
         "_template": "chrome-pfq-informational",
@@ -9401,7 +9766,8 @@
             "chrome_internal"
         ],
         "vm_tests": [
-            "smoke_suite"
+            "smoke_suite",
+            "gce_vm_test"
         ]
     },
     "lakitu-no-vmtest-pre-cq": {
@@ -9465,6 +9831,10 @@
         "upload_gce_images": true,
         "useflags": [
             "chrome_internal"
+        ],
+        "vm_tests": [
+            "smoke_suite",
+            "gce_vm_test"
         ]
     },
     "lakitu-release": {
@@ -9481,10 +9851,14 @@
         "hwqual": false,
         "image_test": false,
         "important": true,
+        "sign_types": [
+            "base"
+        ],
         "sync_chrome": false,
         "upload_gce_images": true,
         "vm_tests": [
-            "smoke_suite"
+            "smoke_suite",
+            "gce_vm_test"
         ]
     },
     "lakitu-tot-chrome-pfq-informational": {
@@ -9660,7 +10034,8 @@
         "sync_chrome": false,
         "upload_gce_images": true,
         "vm_tests": [
-            "smoke_suite"
+            "smoke_suite",
+            "gce_vm_test"
         ]
     },
     "lakitu_mobbuild-tot-chrome-pfq-informational": {
@@ -9687,6 +10062,370 @@
         ],
         "vm_tests": []
     },
+    "lakitu_next-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "lakitu_next"
+        ],
+        "chrome_sdk": false,
+        "disk_layout": "base",
+        "important": false,
+        "manifest": "official.xml",
+        "sync_chrome": false,
+        "upload_gce_images": true,
+        "vm_tests": []
+    },
+    "lakitu_next-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "lakitu_next"
+        ],
+        "chrome_sdk": false,
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "disk_layout": "base",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "sync_chrome": false,
+        "upload_gce_images": true,
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "lakitu_next-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "lakitu_next"
+        ],
+        "disk_layout": "base",
+        "image_test": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "sync_chrome": false,
+        "upload_gce_images": true,
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "lakitu_next-full": {
+        "_template": "full",
+        "boards": [
+            "lakitu_next"
+        ],
+        "chrome_sdk": false,
+        "disk_layout": "base",
+        "image_test": false,
+        "prebuilts": "public",
+        "sync_chrome": false,
+        "upload_gce_images": true,
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "lakitu_next-incremental": {
+        "_template": "incremental",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "lakitu_next"
+        ],
+        "description": "Incremental Builds (internal)",
+        "disk_layout": "base",
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "sync_chrome": false,
+        "upload_gce_images": true,
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": [
+            "smoke_suite",
+            "gce_vm_test"
+        ]
+    },
+    "lakitu_next-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "lakitu_next"
+        ],
+        "disk_layout": "base",
+        "image_test": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "sync_chrome": false,
+        "upload_gce_images": true,
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "lakitu_next-paladin": {
+        "_template": "paladin",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "lakitu_next"
+        ],
+        "chrome_sdk": false,
+        "description": "Commit Queue (internal)",
+        "disk_layout": "base",
+        "image_test": false,
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "sync_chrome": false,
+        "upload_gce_images": true,
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": [],
+        "vm_tests_override": null
+    },
+    "lakitu_next-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "lakitu_next"
+        ]
+    },
+    "lakitu_next-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "lakitu_next"
+        ],
+        "disk_layout": "base",
+        "image_test": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "sync_chrome": false,
+        "upload_gce_images": true,
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "lakitu_next-release": {
+        "_template": "release",
+        "active_waterfall": "chromeos",
+        "afdo_use": false,
+        "boards": [
+            "lakitu_next"
+        ],
+        "chrome_sdk": false,
+        "dev_installer_prebuilts": false,
+        "disk_layout": "base",
+        "hw_tests": [],
+        "hwqual": false,
+        "image_test": false,
+        "signer_tests": false,
+        "sync_chrome": false,
+        "upload_gce_images": true,
+        "vm_tests": [
+            "smoke_suite",
+            "gce_vm_test"
+        ]
+    },
+    "lakitu_next-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "lakitu_next"
+        ],
+        "disk_layout": "base",
+        "manifest": "official.xml",
+        "sync_chrome": false,
+        "upload_gce_images": true,
+        "vm_tests": []
+    },
+    "lakitu_next-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "lakitu_next"
+        ],
+        "disk_layout": "base",
+        "sync_chrome": false,
+        "upload_gce_images": true,
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "lars-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "lars"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "lars-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "lars"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "lars-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "lars"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "lars-firmware": {
+        "_template": "firmware",
+        "boards": [
+            "lars"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty"
+        ]
+    },
+    "lars-full": {
+        "_template": "full",
+        "boards": [
+            "lars"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "lars-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "lars"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "lars-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "lars"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "lars-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "lars"
+        ]
+    },
+    "lars-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "lars"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "lars-release": {
+        "_template": "release",
+        "boards": [
+            "lars"
+        ]
+    },
+    "lars-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "lars"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "lars"
+                ],
+                "grouped": true,
+                "name": "lars-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "lars"
+                ],
+                "grouped": true,
+                "name": "lars-release-afdo-use"
+            }
+        ]
+    },
+    "lars-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "lars"
+        ]
+    },
+    "lars-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "lars"
+        ]
+    },
+    "lars-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "lars"
+        ],
+        "manifest": "official.xml"
+    },
+    "lars-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "lars"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
     "leon-chrome-pfq": {
         "_template": "chrome-pfq",
         "boards": [
@@ -12870,7 +13609,6 @@
         ],
         "chrome_sdk": false,
         "dev_installer_prebuilts": false,
-        "hw_tests": [],
         "important": true,
         "paygen": false,
         "profile": "minimal",
@@ -12941,7 +13679,6 @@
         ],
         "chrome_sdk": false,
         "dev_installer_prebuilts": false,
-        "hw_tests": [],
         "sync_chrome": false,
         "vm_tests": []
     },
@@ -14595,7 +15332,6 @@
         ],
         "chrome_sdk": false,
         "dev_installer_prebuilts": false,
-        "hw_tests": [],
         "sync_chrome": false,
         "vm_tests": []
     },
@@ -15386,25 +16122,9 @@
                     "-chrome_internal"
                 ],
                 "vm_tests": []
-            },
-            {
-                "_template": "full",
-                "boards": [
-                    "wizpig"
-                ],
-                "build_packages_in_background": true,
-                "chrome_sdk_build_chrome": false,
-                "grouped": true,
-                "name": "wizpig-full",
-                "prebuilts": "public",
-                "unittests": null,
-                "useflags": [
-                    "-chrome_internal"
-                ],
-                "vm_tests": []
             }
         ],
-        "description": "Full Builds; Group config (boards: orco, heli, wizpig)",
+        "description": "Full Builds; Group config (boards: orco, heli)",
         "prebuilts": "public",
         "useflags": [
             "-chrome_internal"
@@ -15436,21 +16156,9 @@
                 "name": "heli-release",
                 "unittests": null,
                 "vm_tests": []
-            },
-            {
-                "_template": "release",
-                "boards": [
-                    "wizpig"
-                ],
-                "build_packages_in_background": true,
-                "chrome_sdk_build_chrome": false,
-                "grouped": true,
-                "name": "wizpig-release",
-                "unittests": null,
-                "vm_tests": []
             }
         ],
-        "description": "Release Builds (canary) (internal); Group config (boards: orco, heli, wizpig)",
+        "description": "Release Builds (canary) (internal); Group config (boards: orco, heli)",
         "important": true
     },
     "rambi-firmware": {
@@ -15594,6 +16302,171 @@
         "hw_tests_override": [],
         "vm_tests": []
     },
+    "reks-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "reks"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "reks-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "reks"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "reks-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "reks"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "reks-firmware": {
+        "_template": "firmware",
+        "boards": [
+            "reks"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty"
+        ]
+    },
+    "reks-full": {
+        "_template": "full",
+        "boards": [
+            "reks"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "reks-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "reks"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "reks-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "reks"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "reks-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "reks"
+        ]
+    },
+    "reks-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "reks"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "reks-release": {
+        "_template": "release",
+        "boards": [
+            "reks"
+        ]
+    },
+    "reks-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "reks"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "reks"
+                ],
+                "grouped": true,
+                "name": "reks-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "reks"
+                ],
+                "grouped": true,
+                "name": "reks-release-afdo-use"
+            }
+        ]
+    },
+    "reks-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "reks"
+        ]
+    },
+    "reks-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "reks"
+        ]
+    },
+    "reks-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "reks"
+        ],
+        "manifest": "official.xml"
+    },
+    "reks-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "reks"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
     "rikku-chrome-pfq": {
         "_template": "chrome-pfq",
         "boards": [
@@ -16918,6 +17791,9 @@
             "recovery",
             "test"
         ],
+        "sign_types": [
+            "nv_lp0_firmware"
+        ],
         "vm_tests": []
     },
     "smaug-release-afdo": {
@@ -17387,7 +18263,6 @@
         ],
         "chrome_sdk": false,
         "dev_installer_prebuilts": false,
-        "hw_tests": [],
         "paygen_skip_testing": true,
         "signer_tests": false,
         "sync_chrome": false,
@@ -17410,7 +18285,6 @@
                 "chrome_sdk": false,
                 "dev_installer_prebuilts": false,
                 "grouped": true,
-                "hw_tests": [],
                 "name": "storm-release",
                 "paygen_skip_testing": true,
                 "signer_tests": false,
@@ -17428,7 +18302,6 @@
                 "chrome_sdk_build_chrome": false,
                 "dev_installer_prebuilts": false,
                 "grouped": true,
-                "hw_tests": [],
                 "name": "arkham-release",
                 "sync_chrome": false,
                 "unittests": null,
@@ -17444,7 +18317,6 @@
                 "chrome_sdk": false,
                 "chrome_sdk_build_chrome": false,
                 "grouped": true,
-                "hw_tests": [],
                 "name": "whirlwind-release",
                 "sync_chrome": false,
                 "unittests": null,
@@ -17454,7 +18326,6 @@
         "chrome_sdk": false,
         "description": "Release Builds (canary) (internal); Group config (boards: storm, arkham, whirlwind)",
         "dev_installer_prebuilts": false,
-        "hw_tests": [],
         "paygen_skip_testing": true,
         "signer_tests": false,
         "sync_chrome": false,
@@ -17647,6 +18518,133 @@
             "-chrome_internal"
         ]
     },
+    "strago-b-full-group": {
+        "_template": "full",
+        "boards": [
+            "reks"
+        ],
+        "child_configs": [
+            {
+                "_template": "full",
+                "boards": [
+                    "reks"
+                ],
+                "grouped": true,
+                "name": "reks-full",
+                "prebuilts": "public",
+                "useflags": [
+                    "-chrome_internal"
+                ]
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "cyan-cheets"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "cyan-cheets-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "wizpig"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "wizpig-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "terra"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "terra-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            }
+        ],
+        "description": "Full Builds; Group config (boards: reks, cyan-cheets, wizpig, terra)",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "strago-b-release-group": {
+        "_template": "release",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "reks"
+        ],
+        "child_configs": [
+            {
+                "_template": "release",
+                "boards": [
+                    "reks"
+                ],
+                "grouped": true,
+                "name": "reks-release"
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "cyan-cheets"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "cyan-cheets-release",
+                "unittests": null,
+                "vm_tests": []
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "wizpig"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "wizpig-release",
+                "unittests": null,
+                "vm_tests": []
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "terra"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "terra-release",
+                "unittests": null,
+                "vm_tests": []
+            }
+        ],
+        "description": "Release Builds (canary) (internal); Group config (boards: reks, cyan-cheets, wizpig, terra)"
+    },
     "strago-chrome-pfq": {
         "_template": "chrome-pfq",
         "boards": [
@@ -18698,6 +19696,171 @@
         "builder_class_name": "test_builders.ManifestVersionedSyncBuilder",
         "hw_tests_override": []
     },
+    "terra-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "terra"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "terra-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "terra"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "terra-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "terra"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "terra-firmware": {
+        "_template": "firmware",
+        "boards": [
+            "terra"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty"
+        ]
+    },
+    "terra-full": {
+        "_template": "full",
+        "boards": [
+            "terra"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "terra-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "terra"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "terra-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "terra"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "terra-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "terra"
+        ]
+    },
+    "terra-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "terra"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "terra-release": {
+        "_template": "release",
+        "boards": [
+            "terra"
+        ]
+    },
+    "terra-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "terra"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "terra"
+                ],
+                "grouped": true,
+                "name": "terra-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "terra"
+                ],
+                "grouped": true,
+                "name": "terra-release-afdo-use"
+            }
+        ]
+    },
+    "terra-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "terra"
+        ]
+    },
+    "terra-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "terra"
+        ]
+    },
+    "terra-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "terra"
+        ],
+        "manifest": "official.xml"
+    },
+    "terra-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "terra"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
     "test-ap-group": {
         "_template": "test-ap",
         "boards": [
@@ -18876,32 +20039,6 @@
             "-chrome_internal"
         ]
     },
-    "toolchain-major": {
-        "archive_build_debug": true,
-        "boards": [
-            "x86-generic",
-            "arm-generic",
-            "amd64-generic"
-        ],
-        "build_type": "chroot",
-        "builder_class_name": "sdk_builders.ChrootSdkBuilder",
-        "chrome_sdk": true,
-        "description": "Test next major toolchain revision",
-        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Continuous",
-        "gcc_githash": "svn-mirror/google/main",
-        "git_sync": true,
-        "hw_tests_override": [],
-        "image_test": true,
-        "images": [
-            "base",
-            "recovery",
-            "test",
-            "factory_install"
-        ],
-        "latest_toolchain": true,
-        "use_sdk": false,
-        "usepkg_build_packages": false
-    },
     "toolchain-minor": {
         "archive_build_debug": true,
         "boards": [
@@ -19413,28 +20550,12 @@
             {
                 "_template": "full",
                 "boards": [
-                    "veyron_thea"
+                    "veyron_mickey"
                 ],
                 "build_packages_in_background": true,
                 "chrome_sdk_build_chrome": false,
                 "grouped": true,
-                "name": "veyron_thea-full",
-                "prebuilts": "public",
-                "unittests": null,
-                "useflags": [
-                    "-chrome_internal"
-                ],
-                "vm_tests": []
-            },
-            {
-                "_template": "full",
-                "boards": [
-                    "veyron_shark"
-                ],
-                "build_packages_in_background": true,
-                "chrome_sdk_build_chrome": false,
-                "grouped": true,
-                "name": "veyron_shark-full",
+                "name": "veyron_mickey-full",
                 "prebuilts": "public",
                 "unittests": null,
                 "useflags": [
@@ -19443,7 +20564,7 @@
                 "vm_tests": []
             }
         ],
-        "description": "Full Builds; Group config (boards: veyron_brain, veyron_danger, veyron_thea, veyron_shark)",
+        "description": "Full Builds; Group config (boards: veyron_brain, veyron_danger, veyron_mickey)",
         "prebuilts": "public",
         "useflags": [
             "-chrome_internal"
@@ -19481,44 +20602,34 @@
             {
                 "_template": "release",
                 "boards": [
-                    "veyron_thea"
+                    "veyron_mickey"
                 ],
                 "build_packages_in_background": true,
                 "chrome_sdk_build_chrome": false,
                 "grouped": true,
-                "name": "veyron_thea-release",
-                "unittests": null,
-                "vm_tests": []
-            },
-            {
-                "_template": "release",
-                "boards": [
-                    "veyron_shark"
-                ],
-                "build_packages_in_background": true,
-                "chrome_sdk_build_chrome": false,
-                "grouped": true,
-                "name": "veyron_shark-release",
+                "hw_tests": [],
+                "name": "veyron_mickey-release",
                 "unittests": null,
                 "vm_tests": []
             }
         ],
-        "description": "Release Builds (canary) (internal); Group config (boards: veyron_brain, veyron_danger, veyron_thea, veyron_shark)",
+        "description": "Release Builds (canary) (internal); Group config (boards: veyron_brain, veyron_danger, veyron_mickey)",
+        "important": true,
         "vm_tests": []
     },
     "veyron-d-full-group": {
         "_template": "full",
         "boards": [
-            "veyron_mickey"
+            "veyron_shark"
         ],
         "child_configs": [
             {
                 "_template": "full",
                 "boards": [
-                    "veyron_mickey"
+                    "veyron_shark"
                 ],
                 "grouped": true,
-                "name": "veyron_mickey-full",
+                "name": "veyron_shark-full",
                 "prebuilts": "public",
                 "useflags": [
                     "-chrome_internal"
@@ -19540,9 +20651,25 @@
                     "-chrome_internal"
                 ],
                 "vm_tests": []
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "veyron_minnie-cheets"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "veyron_minnie-cheets-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
             }
         ],
-        "description": "Full Builds; Group config (boards: veyron_mickey, veyron_romy)",
+        "description": "Full Builds; Group config (boards: veyron_shark, veyron_romy, veyron_minnie-cheets)",
         "prebuilts": "public",
         "useflags": [
             "-chrome_internal"
@@ -19553,17 +20680,16 @@
         "_template": "release",
         "active_waterfall": "chromeos",
         "boards": [
-            "veyron_mickey"
+            "veyron_shark"
         ],
         "child_configs": [
             {
                 "_template": "release",
                 "boards": [
-                    "veyron_mickey"
+                    "veyron_shark"
                 ],
                 "grouped": true,
-                "hw_tests": [],
-                "name": "veyron_mickey-release",
+                "name": "veyron_shark-release",
                 "vm_tests": []
             },
             {
@@ -19578,11 +20704,21 @@
                 "name": "veyron_romy-release",
                 "unittests": null,
                 "vm_tests": []
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "veyron_minnie-cheets"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "veyron_minnie-cheets-release",
+                "unittests": null,
+                "vm_tests": []
             }
         ],
-        "description": "Release Builds (canary) (internal); Group config (boards: veyron_mickey, veyron_romy)",
-        "hw_tests": [],
-        "important": true,
+        "description": "Release Builds (canary) (internal); Group config (boards: veyron_shark, veyron_romy, veyron_minnie-cheets)",
         "vm_tests": []
     },
     "veyron-full-group": {
@@ -20799,10 +21935,14 @@
     },
     "veyron_mighty-paladin": {
         "_template": "paladin",
+        "active_waterfall": "chromeos",
         "boards": [
             "veyron_mighty"
         ],
         "description": "Commit Queue (internal)",
+        "hw_tests": [
+            "{\n    \"async\": false,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 4,\n    \"num\": 6,\n    \"offload_failures_only\": true,\n    \"pool\": \"cq\",\n    \"priority\": \"CQ\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 7200,\n    \"warn_only\": false\n}"
+        ],
         "important": false,
         "internal": true,
         "manifest": "official.xml",
@@ -20901,6 +22041,173 @@
         ],
         "vm_tests": []
     },
+    "veyron_minnie-cheets-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "veyron_minnie-cheets"
+        ],
+        "important": false,
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "veyron_minnie-cheets-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "veyron_minnie-cheets"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_minnie-cheets-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "veyron_minnie-cheets"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "veyron_minnie-cheets-full": {
+        "_template": "full",
+        "boards": [
+            "veyron_minnie-cheets"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_minnie-cheets-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "veyron_minnie-cheets"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "veyron_minnie-cheets-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "veyron_minnie-cheets"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": [],
+        "vm_tests_override": null
+    },
+    "veyron_minnie-cheets-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "veyron_minnie-cheets"
+        ]
+    },
+    "veyron_minnie-cheets-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "veyron_minnie-cheets"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_minnie-cheets-release": {
+        "_template": "release",
+        "boards": [
+            "veyron_minnie-cheets"
+        ],
+        "vm_tests": []
+    },
+    "veyron_minnie-cheets-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "veyron_minnie-cheets"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "veyron_minnie-cheets"
+                ],
+                "grouped": true,
+                "name": "veyron_minnie-cheets-release-afdo-generate",
+                "vm_tests": []
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "veyron_minnie-cheets"
+                ],
+                "grouped": true,
+                "name": "veyron_minnie-cheets-release-afdo-use",
+                "vm_tests": []
+            }
+        ],
+        "vm_tests": []
+    },
+    "veyron_minnie-cheets-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "veyron_minnie-cheets"
+        ],
+        "vm_tests": []
+    },
+    "veyron_minnie-cheets-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "veyron_minnie-cheets"
+        ],
+        "vm_tests": []
+    },
+    "veyron_minnie-cheets-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "veyron_minnie-cheets"
+        ],
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "veyron_minnie-cheets-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "veyron_minnie-cheets"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
     "veyron_minnie-chrome-pfq": {
         "_template": "chrome-pfq",
         "boards": [
@@ -21804,10 +23111,14 @@
     },
     "veyron_speedy-paladin": {
         "_template": "paladin",
+        "active_waterfall": "chromeos",
         "boards": [
             "veyron_speedy"
         ],
         "description": "Commit Queue (internal)",
+        "hw_tests": [
+            "{\n    \"async\": false,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 4,\n    \"num\": 6,\n    \"offload_failures_only\": true,\n    \"pool\": \"cq\",\n    \"priority\": \"CQ\",\n    \"retry\": true,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 7200,\n    \"warn_only\": false\n}"
+        ],
         "important": false,
         "internal": true,
         "manifest": "official.xml",
@@ -21920,173 +23231,6 @@
         ],
         "vm_tests": []
     },
-    "veyron_thea-chrome-pfq": {
-        "_template": "chrome-pfq",
-        "boards": [
-            "veyron_thea"
-        ],
-        "important": false,
-        "manifest": "official.xml",
-        "vm_tests": []
-    },
-    "veyron_thea-chromium-pfq": {
-        "_template": "chromium-pfq",
-        "boards": [
-            "veyron_thea"
-        ],
-        "description": "Preflight Chromium Uprev & Build (internal)",
-        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
-        "important": false,
-        "internal": true,
-        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
-        "overlays": "both",
-        "prebuilts": "public",
-        "useflags": [
-            "-chrome_internal"
-        ],
-        "vm_tests": []
-    },
-    "veyron_thea-compile-only-pre-cq": {
-        "_template": "compile-only-pre-cq",
-        "boards": [
-            "veyron_thea"
-        ],
-        "internal": true,
-        "manifest": "official.xml",
-        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
-        "overlays": "both",
-        "useflags": [
-            "chrome_internal"
-        ]
-    },
-    "veyron_thea-full": {
-        "_template": "full",
-        "boards": [
-            "veyron_thea"
-        ],
-        "prebuilts": "public",
-        "useflags": [
-            "-chrome_internal"
-        ],
-        "vm_tests": []
-    },
-    "veyron_thea-no-vmtest-pre-cq": {
-        "_template": "no-vmtest-pre-cq",
-        "boards": [
-            "veyron_thea"
-        ],
-        "internal": true,
-        "manifest": "official.xml",
-        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
-        "overlays": "both",
-        "useflags": [
-            "chrome_internal"
-        ]
-    },
-    "veyron_thea-paladin": {
-        "_template": "paladin",
-        "boards": [
-            "veyron_thea"
-        ],
-        "description": "Commit Queue (internal)",
-        "important": false,
-        "internal": true,
-        "manifest": "official.xml",
-        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
-        "overlays": "both",
-        "prebuilts": "private",
-        "useflags": [
-            "chrome_internal"
-        ],
-        "vm_tests": [],
-        "vm_tests_override": null
-    },
-    "veyron_thea-payloads": {
-        "_template": "payloads",
-        "boards": [
-            "veyron_thea"
-        ]
-    },
-    "veyron_thea-pre-cq": {
-        "_template": "pre-cq",
-        "boards": [
-            "veyron_thea"
-        ],
-        "internal": true,
-        "manifest": "official.xml",
-        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
-        "overlays": "both",
-        "useflags": [
-            "chrome_internal"
-        ],
-        "vm_tests": []
-    },
-    "veyron_thea-release": {
-        "_template": "release",
-        "boards": [
-            "veyron_thea"
-        ],
-        "vm_tests": []
-    },
-    "veyron_thea-release-afdo": {
-        "_template": "release-afdo-generate",
-        "boards": [
-            "veyron_thea"
-        ],
-        "child_configs": [
-            {
-                "_template": "release-afdo-generate",
-                "boards": [
-                    "veyron_thea"
-                ],
-                "grouped": true,
-                "name": "veyron_thea-release-afdo-generate",
-                "vm_tests": []
-            },
-            {
-                "_template": "release-afdo-use",
-                "boards": [
-                    "veyron_thea"
-                ],
-                "grouped": true,
-                "name": "veyron_thea-release-afdo-use",
-                "vm_tests": []
-            }
-        ],
-        "vm_tests": []
-    },
-    "veyron_thea-release-afdo-generate": {
-        "_template": "release-afdo-generate",
-        "boards": [
-            "veyron_thea"
-        ],
-        "vm_tests": []
-    },
-    "veyron_thea-release-afdo-use": {
-        "_template": "release-afdo-use",
-        "boards": [
-            "veyron_thea"
-        ],
-        "vm_tests": []
-    },
-    "veyron_thea-tot-chrome-pfq-informational": {
-        "_template": "chrome-pfq-informational",
-        "boards": [
-            "veyron_thea"
-        ],
-        "manifest": "official.xml",
-        "vm_tests": []
-    },
-    "veyron_thea-tot-chromium-pfq-informational": {
-        "_template": "chromium-pfq-informational",
-        "boards": [
-            "veyron_thea"
-        ],
-        "useflags": [
-            "-chrome_internal"
-        ],
-        "vm_tests": []
-    },
     "whirlwind-chrome-pfq": {
         "_template": "chrome-pfq",
         "boards": [
@@ -22160,12 +23304,15 @@
     },
     "whirlwind-paladin": {
         "_template": "paladin",
-        "active_waterfall": "chromeos",
         "boards": [
             "whirlwind"
         ],
         "chrome_sdk": false,
         "description": "Commit Queue (internal)",
+        "hw_tests": [
+            "{\n    \"async\": false,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 6,\n    \"offload_failures_only\": false,\n    \"pool\": \"cq\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"jetstream_cq\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+        ],
+        "important": false,
         "internal": true,
         "manifest": "official.xml",
         "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
@@ -22206,7 +23353,6 @@
             "whirlwind"
         ],
         "chrome_sdk": false,
-        "hw_tests": [],
         "sync_chrome": false,
         "vm_tests": []
     },
diff --git a/cbuildbot/config_lib.py b/cbuildbot/config_lib.py
index b9f71b2..226ad0b 100644
--- a/cbuildbot/config_lib.py
+++ b/cbuildbot/config_lib.py
@@ -269,7 +269,6 @@
 
   Some combinations of member settings are invalid:
     * A suite config may not specify both blocking and async.
-    * A suite config may not specify both retry and async.
     * A suite config may not specify both warn_only and critical.
   """
   # This timeout is larger than it needs to be because of autotest overhead.
@@ -284,7 +283,7 @@
                retry=True, max_retries=10, minimum_duts=0, suite_min_duts=0,
                offload_failures_only=False):
     """Constructor -- see members above."""
-    assert not async or (not blocking and not retry)
+    assert not async or not blocking
     assert not warn_only or not critical
     self.suite = suite
     self.num = num
@@ -699,6 +698,10 @@
       # manifest versions that we may later want to branch off of.
       branch_util_test=False,
 
+      # If specified, it is passed on to the PushImage script as '--sign-types'
+      # command-line argument.  Must be either None or a list of image types.
+      sign_types=None,
+
       # TODO(sosa): Collapse to one option.
       # ========== Dev installer prebuilts options =======================
 
@@ -873,7 +876,10 @@
       INTERNAL_MANIFEST_VERSIONS_PATH=None,
 
       # URL of the repo project.
-      REPO_URL='https://chromium.googlesource.com/external/repo'
+      REPO_URL='https://chromium.googlesource.com/external/repo',
+
+      # GS URL in which to archive build artifacts.
+      ARCHIVE_URL='gs://chromeos-image-archive',
   )
 
   return default_site_params
diff --git a/cbuildbot/constants.py b/cbuildbot/constants.py
index 168f2e7..0a64af3 100644
--- a/cbuildbot/constants.py
+++ b/cbuildbot/constants.py
@@ -442,6 +442,7 @@
 HWTEST_COMMIT_SUITE = 'bvt-cq'
 HWTEST_CANARY_SUITE = 'bvt-perbuild'
 HWTEST_AFDO_SUITE = 'AFDO_record'
+HWTEST_JETSTREAM_COMMIT_SUITE = 'jetstream_cq'
 HWTEST_MOBLAB_SUITE = 'moblab'
 HWTEST_MOBLAB_QUICK_SUITE = 'moblab_quick'
 HWTEST_SANITY_SUITE = 'sanity'
@@ -721,7 +722,6 @@
                                       image_bin)
   return '%s_gce.tar.gz' % os.path.splitext(image_bin)[0]
 
-DEFAULT_ARCHIVE_BUCKET = 'gs://chromeos-image-archive'
 RELEASE_BUCKET = 'gs://chromeos-releases'
 TRASH_BUCKET = 'gs://chromeos-throw-away-bucket'
 CHROME_SYSROOT_TAR = 'sysroot_%s.tar.xz' % _SlashToUnderscore(CHROME_CP)
@@ -760,7 +760,12 @@
 IMAGE_TYPE_RECOVERY = 'recovery'
 IMAGE_TYPE_FACTORY = 'factory'
 IMAGE_TYPE_FIRMWARE = 'firmware'
+# NVIDIA Tegra SoC resume firmware blob.
 IMAGE_TYPE_NV_LP0_FIRMWARE = 'nv_lp0_firmware'
+# USB PD accessory microcontroller firmware (e.g. power brick, display dongle).
+IMAGE_TYPE_ACCESSORY_USBPD = 'accessory_usbpd'
+# Standalone accessory microcontroller firmware (e.g. wireless keyboard).
+IMAGE_TYPE_ACCESSORY_RWSIG = 'accessory_rwsig'
 
 IMAGE_TYPE_TO_NAME = {
     IMAGE_TYPE_BASE: BASE_IMAGE_BIN,
diff --git a/cbuildbot/lkgm_manager.py b/cbuildbot/lkgm_manager.py
index 05e4eb3..947b7b2 100644
--- a/cbuildbot/lkgm_manager.py
+++ b/cbuildbot/lkgm_manager.py
@@ -249,6 +249,11 @@
     self.InitializeManifestVariables(version_info)
 
     self.GenerateBlameListSinceLKGM()
+
+    # Throw away CLs that might not be used this run.
+    if validation_pool:
+      validation_pool.FilterChangesForThrottledTree()
+
     new_manifest = self.CreateManifest()
 
     # For Chrome PFQ, add the version of Chrome to use.
@@ -326,6 +331,9 @@
       build_id: Optional integer cidb build id of the build publishing the
                 manifest.
 
+    Returns:
+      Path to the manifest version file to use.
+
     Raises:
       GenerateBuildSpecException in case of failure to check-in the new
         manifest because of a git error or the manifest is already checked-in.
diff --git a/cbuildbot/manifest_version.py b/cbuildbot/manifest_version.py
index 92e5de3..bc74a8e 100644
--- a/cbuildbot/manifest_version.py
+++ b/cbuildbot/manifest_version.py
@@ -7,6 +7,7 @@
 from __future__ import print_function
 
 import cPickle
+import datetime
 import fnmatch
 import glob
 import os
@@ -418,6 +419,125 @@
                               dashboard_url=self.dashboard_url))
 
 
+class SlaveStatus(object):
+  """A Class to easily interpret data from CIDB regarding slave status.
+
+  This is intended for short lived instances used to determine if the loop on
+  getting the builders statuses should continue or break out.  The main function
+  is ShouldWait() with everything else pretty much a helper function for it.
+  """
+
+  BUILDER_START_TIMEOUT = 5
+
+  def __init__(self, status, start_time, builders_array, previous_completed):
+    """Initializes a slave status object.
+
+    Args:
+      status: Dict of the slave status from CIDB.
+      start_time: datetime.datetime object of when the build started.
+      builders_array: List of the expected builders.
+      previous_completed: Set of builders that have finished already.
+    """
+    self.status = status
+    self.start_time = start_time
+    self.builders_array = builders_array
+    self.previous_completed = previous_completed
+    self.completed = []
+
+  def GetMissing(self):
+    """Returns the missing builders.
+
+    Returns:
+      A list of the missing builders
+    """
+    return list(set(self.builders_array) - set(self.status.keys()))
+
+  def GetCompleted(self):
+    """Returns the builders that have completed.
+
+    Returns:
+      A list of the completed builders.
+    """
+    if not self.completed:
+      self.completed = [b for b, s in self.status.iteritems()
+                        if s in constants.BUILDER_COMPLETED_STATUSES and
+                        b in self.builders_array]
+
+    # Logging of the newly complete builders.
+    for builder in sorted(set(self.completed) - self.previous_completed):
+      logging.info('Build config %s completed with status "%s".',
+                   builder, self.status[builder])
+    self.previous_completed.update(set(self.completed))
+    return self.completed
+
+  def Completed(self):
+    """Returns a bool if all builders have completed successfully.
+
+    Returns:
+      A bool of True if all builders successfully completed, False otherwise.
+    """
+    return len(self.GetCompleted()) == len(self.builders_array)
+
+  def ShouldFailForBuilderStartTimeout(self, current_time):
+    """Decides if we should fail if a builder hasn't started within 5 mins.
+
+    If a builder hasn't started within BUILDER_START_TIMEOUT and the rest of the
+    builders have finished, let the caller know that we should fail.
+
+    Args:
+      current_time: A datetime.datetime object letting us know the current time.
+
+    Returns:
+      A bool saying True that we should fail, False otherwise.
+    """
+    # Check that we're at least past the start timeout.
+    builder_start_deadline = datetime.timedelta(
+        minutes=self.BUILDER_START_TIMEOUT)
+    past_deadline = current_time - self.start_time > builder_start_deadline
+
+    # Check that aside from the missing builders the rest have completed.
+    other_builders_completed = (
+        (len(self.GetMissing()) + len(self.GetCompleted())) ==
+        len(self.builders_array))
+
+    # Check that we have missing builders and logging who they are.
+    builders_are_missing = False
+    for builder in self.GetMissing():
+      logging.error('No status found for build config %s.', builder)
+      builders_are_missing = True
+
+    return past_deadline and other_builders_completed and builders_are_missing
+
+  def ShouldWait(self):
+    """Decides if we should continue to wait for the builders to finish.
+
+    This will be the retry function for timeout_util.WaitForSuccess, basically
+    this function will return False if all builders finished or we see a
+    problem with the builders.  Otherwise we'll return True to continue polling
+    for the builders statuses.
+
+    Returns:
+      A bool of True if we should continue to wait and False if we should not.
+    """
+    # Check if all builders completed.
+    if self.Completed():
+      return False
+
+    current_time = datetime.datetime.now()
+
+    # Guess there are some builders building, check if there is a problem.
+    if self.ShouldFailForBuilderStartTimeout(current_time):
+      logging.error('Ending build since at least one builder has not started '
+                    'within 5 mins.')
+      return False
+
+    # We got here which means no problems, we should still wait.
+    logging.info('Still waiting for the following builds to complete: %r',
+                 sorted(set(self.builders_array).difference(
+                     self.GetCompleted())))
+    return True
+
+
 class BuildSpecsManager(object):
   """A Class to manage buildspecs and their states."""
 
@@ -695,41 +815,23 @@
       A build_config name-> status dictionary of build statuses.
     """
     builders_completed = set()
-
-    def _GetStatusesFromDB():
-      """Helper function that iterates through current statuses."""
-      status_dict = self.GetSlaveStatusesFromCIDB(master_build_id)
-      for builder in set(builders_array) - set(status_dict.keys()):
-        logging.warning('No status found for build config %s.', builder)
-
-      latest_completed = set(
-          [b for b, s in status_dict.iteritems() if s in
-           constants.BUILDER_COMPLETED_STATUSES and b in builders_array])
-      for builder in sorted(latest_completed - builders_completed):
-        logging.info('Build config %s completed with status "%s".',
-                     builder, status_dict[builder])
-      builders_completed.update(latest_completed)
-
-      if len(builders_completed) < len(builders_array):
-        logging.info('Still waiting for the following builds to complete: %r',
-                     sorted(set(builders_array).difference(builders_completed)))
-        return None
-      else:
-        return 'Builds completed.'
+    start_time = datetime.datetime.now()
 
     def _PrintRemainingTime(remaining):
       logging.info('%s until timeout...', remaining)
 
     # Check for build completion until all builders report in.
+    builds_timed_out = False
     try:
-      builds_succeeded = timeout_util.WaitForSuccess(
-          lambda x: x is None,
-          _GetStatusesFromDB,
+      timeout_util.WaitForSuccess(
+          lambda statuses: statuses.ShouldWait(),
+          lambda: SlaveStatus(self.GetSlaveStatusesFromCIDB(master_build_id),
+                              start_time, builders_array, builders_completed),
           timeout,
           period=self.SLEEP_TIMEOUT,
           side_effect_func=_PrintRemainingTime)
     except timeout_util.TimeoutError:
-      builds_succeeded = None
+      builds_timed_out = True
 
     # Actually fetch the BuildStatus pickles from Google Storage.
     builder_statuses = {}
@@ -738,7 +840,7 @@
       builder_status = self.GetBuildStatus(builder, self.current_version)
       builder_statuses[builder] = builder_status
 
-    if not builds_succeeded:
+    if builds_timed_out:
       logging.error('Not all builds finished before timeout (%d minutes)'
                     ' reached.', int((timeout / 60) + 0.5))
 
diff --git a/cbuildbot/manifest_version_unittest.py b/cbuildbot/manifest_version_unittest.py
index 233d890..08411a2 100644
--- a/cbuildbot/manifest_version_unittest.py
+++ b/cbuildbot/manifest_version_unittest.py
@@ -6,7 +6,7 @@
 
 from __future__ import print_function
 
-import mox
+import datetime
 import os
 import tempfile
 
@@ -59,7 +59,7 @@
                     'Unable to create symlink to %s' % destfile)
 
 
-class VersionInfoTest(cros_test_lib.MoxTempDirTestCase):
+class VersionInfoTest(cros_test_lib.MockTempDirTestCase):
   """Test methods testing methods in VersionInfo class."""
 
   @classmethod
@@ -103,25 +103,23 @@
   def CommonTestIncrementVersion(self, incr_type, version, chrome_branch=None):
     """Common test increment.  Returns path to new incremented file."""
     message = 'Incrementing cuz I sed so'
-    self.mox.StubOutWithMock(git, 'CreateBranch')
-    self.mox.StubOutWithMock(manifest_version, '_PushGitChanges')
-    self.mox.StubOutWithMock(git, 'CleanAndCheckoutUpstream')
-
-    git.CreateBranch(self.tempdir, manifest_version.PUSH_BRANCH)
+    create_mock = self.PatchObject(git, 'CreateBranch')
+    push_mock = self.PatchObject(manifest_version, '_PushGitChanges')
+    clean_mock = self.PatchObject(git, 'CleanAndCheckoutUpstream')
 
     version_file = self.CreateFakeVersionFile(
         self.tempdir, version=version, chrome_branch=chrome_branch)
-
-    manifest_version._PushGitChanges(self.tempdir, message, dry_run=False,
-                                     push_to=None)
-
-    git.CleanAndCheckoutUpstream(self.tempdir)
-    self.mox.ReplayAll()
     info = manifest_version.VersionInfo(version_file=version_file,
                                         incr_type=incr_type)
     info.IncrementVersion()
     info.UpdateVersionFile(message, dry_run=False)
-    self.mox.VerifyAll()
+
+    create_mock.assert_called_once_with(
+        self.tempdir, manifest_version.PUSH_BRANCH)
+    push_mock.assert_called_once_with(
+        self.tempdir, message, dry_run=False, push_to=None)
+    clean_mock.assert_called_once_with(self.tempdir)
+
     return version_file
 
   def testIncrementVersionPatch(self):
@@ -154,8 +152,7 @@
     self.assertEqual(new_info.chrome_branch, '30')
 
 
-class BuildSpecsManagerTest(cros_test_lib.MoxTempDirTestCase,
-                            cros_test_lib.MockTestCase):
+class BuildSpecsManagerTest(cros_test_lib.MockTempDirTestCase):
   """Tests for the BuildSpecs manager."""
 
   def setUp(self):
@@ -184,7 +181,7 @@
     """Tests that PublishManifest writes a build id."""
     expected_message = ('Automatic: Start x86-generic-paladin master 1\n'
                         'CrOS-Build-Id: %s' % MOCK_BUILD_ID)
-    self.mox.StubOutWithMock(self.manager, 'PushSpecChanges')
+    push_mock = self.PatchObject(self.manager, 'PushSpecChanges')
 
     info = manifest_version.VersionInfo(
         FAKE_VERSION_STRING, CHROME_BRANCH, incr_type='branch')
@@ -194,16 +191,14 @@
     osutils.Touch(m)
     self.manager.InitializeManifestVariables(info)
 
-    self.manager.PushSpecChanges(expected_message)
-
-    self.mox.ReplayAll()
     self.manager.PublishManifest(m, '1', build_id=MOCK_BUILD_ID)
-    self.mox.VerifyAll()
+
+    push_mock.assert_called_once_with(expected_message)
 
   def testPublishManifestCommitMessageWithNegativeBuildId(self):
     """Tests that PublishManifest doesn't write a negative build_id"""
     expected_message = 'Automatic: Start x86-generic-paladin master 1'
-    self.mox.StubOutWithMock(self.manager, 'PushSpecChanges')
+    push_mock = self.PatchObject(self.manager, 'PushSpecChanges')
 
     info = manifest_version.VersionInfo(
         FAKE_VERSION_STRING, CHROME_BRANCH, incr_type='branch')
@@ -213,16 +208,14 @@
     osutils.Touch(m)
     self.manager.InitializeManifestVariables(info)
 
-    self.manager.PushSpecChanges(expected_message)
-
-    self.mox.ReplayAll()
     self.manager.PublishManifest(m, '1', build_id=-1)
-    self.mox.VerifyAll()
+
+    push_mock.assert_called_once_with(expected_message)
 
   def testPublishManifestCommitMessageWithNoneBuildId(self):
     """Tests that PublishManifest doesn't write a non-existant build_id"""
     expected_message = 'Automatic: Start x86-generic-paladin master 1'
-    self.mox.StubOutWithMock(self.manager, 'PushSpecChanges')
+    push_mock = self.PatchObject(self.manager, 'PushSpecChanges')
 
     info = manifest_version.VersionInfo(
         FAKE_VERSION_STRING, CHROME_BRANCH, incr_type='branch')
@@ -232,11 +225,9 @@
     osutils.Touch(m)
     self.manager.InitializeManifestVariables(info)
 
-    self.manager.PushSpecChanges(expected_message)
-
-    self.mox.ReplayAll()
     self.manager.PublishManifest(m, '1')
-    self.mox.VerifyAll()
+
+    push_mock.assert_called_once_with(expected_message)
 
   def testLoadSpecs(self):
     """Tests whether we can load specs correctly."""
@@ -262,16 +253,14 @@
         for_build, 'fail', CHROME_BRANCH, os.path.basename(m1)))
     manifest_version.CreateSymlink(m1, os.path.join(
         for_build, 'pass', CHROME_BRANCH, os.path.basename(m2)))
-    self.mox.StubOutWithMock(self.manager, 'GetBuildStatus')
-    self.manager.GetBuildStatus(self.build_names[0], '1.2.5').AndReturn(missing)
-    self.mox.ReplayAll()
+    m = self.PatchObject(self.manager, 'GetBuildStatus', return_value=missing)
     self.manager.InitializeManifestVariables(info)
-    self.mox.VerifyAll()
     self.assertEqual(self.manager.latest_unprocessed, '1.2.5')
+    m.assert_called_once_with(self.build_names[0], '1.2.5')
 
   def testLatestSpecFromDir(self):
     """Tests whether we can get sorted specs correctly from a directory."""
-    self.mox.StubOutWithMock(repository, 'CloneGitRepo')
+    self.PatchObject(repository, 'CloneGitRepo', side_effect=Exception())
     info = manifest_version.VersionInfo(
         '99.1.2', CHROME_BRANCH, incr_type='branch')
 
@@ -286,9 +275,7 @@
     for m in [m1, m2, m3, m4]:
       osutils.Touch(m)
 
-    self.mox.ReplayAll()
     spec = self.manager._LatestSpecFromDir(info, specs_dir)
-    self.mox.VerifyAll()
     # Should be the latest on the 99.1 branch.
     self.assertEqual(spec, '99.1.10')
 
@@ -301,26 +288,22 @@
         FAKE_VERSION_STRING, CHROME_BRANCH, incr_type='branch')
 
     self.manager.latest = None
-    self.mox.ReplayAll()
     version = self.manager.GetNextVersion(info)
-    self.mox.VerifyAll()
     self.assertEqual(FAKE_VERSION_STRING, version)
 
   def testGetNextVersionIncrement(self):
     """Tests that we create a new version if a previous one exists."""
-    self.mox.StubOutWithMock(manifest_version.VersionInfo, 'UpdateVersionFile')
+    m = self.PatchObject(manifest_version.VersionInfo, 'UpdateVersionFile')
     version_file = VersionInfoTest.CreateFakeVersionFile(self.tempdir)
     info = manifest_version.VersionInfo(version_file=version_file,
                                         incr_type='branch')
-    info.UpdateVersionFile(
-        'Automatic: %s - Updating to a new version number from %s' % (
-            self.build_names[0], FAKE_VERSION_STRING), dry_run=True)
 
     self.manager.latest = FAKE_VERSION_STRING
-    self.mox.ReplayAll()
     version = self.manager.GetNextVersion(info)
-    self.mox.VerifyAll()
     self.assertEqual(FAKE_VERSION_STRING_NEXT, version)
+    m.assert_called_once_with(
+        'Automatic: %s - Updating to a new version number from %s' % (
+            self.build_names[0], FAKE_VERSION_STRING), dry_run=True)
 
   def testGetNextBuildSpec(self):
     """End-to-end test of updating the manifest."""
@@ -333,10 +316,8 @@
     rc = self.StartPatcher(cros_build_lib_unittest.RunCommandMock())
     rc.SetDefaultCmdResult()
 
-    self.mox.ReplayAll()
     self.manager.GetNextBuildSpec(retries=0)
     self.manager.UpdateStatus({self.build_names[0]: True})
-    self.mox.VerifyAll()
 
   def testUnpickleBuildStatus(self):
     """Tests that _UnpickleBuildStatus returns the correct values."""
@@ -366,25 +347,19 @@
       builders: List of builders to get status for.
       status_runs: List of dictionaries of expected build and status.
     """
-    self.mox.StubOutWithMock(manifest_version.BuildSpecsManager,
-                             'GetSlaveStatusesFromCIDB')
-    self.mox.StubOutWithMock(manifest_version.BuildSpecsManager,
-                             'GetBuildStatus')
-    for status_dict in status_runs:
-      manifest_version.BuildSpecsManager.GetSlaveStatusesFromCIDB(
-          mox.IgnoreArg()).AndReturn(status_dict)
+    self.PatchObject(manifest_version.BuildSpecsManager,
+                     'GetSlaveStatusesFromCIDB', side_effect=status_runs)
 
     final_status_dict = status_runs[-1]
-    for builder in builders:
-      status = manifest_version.BuilderStatus(
-          final_status_dict.get(builder), None)
-      manifest_version.BuildSpecsManager.GetBuildStatus(
-          builder, mox.IgnoreArg()).AndReturn(status)
+    build_statuses = [
+        manifest_version.BuilderStatus(final_status_dict.get(x), None)
+        for x in builders
+    ]
+    self.PatchObject(manifest_version.BuildSpecsManager,
+                     'GetBuildStatus',
+                     side_effect=build_statuses)
 
-    self.mox.ReplayAll()
-    statuses = self.manager.GetBuildersStatus(mox.IgnoreArg, builders)
-    self.mox.VerifyAll()
-    return statuses
+    return self.manager.GetBuildersStatus('builderid', builders)
 
   def testGetBuildersStatusBothFinished(self):
     """Tests GetBuilderStatus where both builds have finished."""
@@ -405,3 +380,139 @@
     statuses = self._GetBuildersStatus(['build1', 'build2'], status_runs)
     self.assertTrue(statuses['build1'].Failed())
     self.assertTrue(statuses['build2'].Passed())
+
+
+class SlaveStatusTest(cros_test_lib.TestCase):
+  """Test methods testing methods in SlaveStatus class."""
+
+  def testGetMissing(self):
+    """Tests GetMissing returns the missing builders."""
+    status = {'build1': constants.BUILDER_STATUS_FAILED,
+              'build2': constants.BUILDER_STATUS_INFLIGHT}
+    builders_array = ['build1', 'build2', 'missing_builder']
+    slaveStatus = manifest_version.SlaveStatus(status, datetime.datetime.now(),
+                                               builders_array, set())
+
+    self.assertEqual(slaveStatus.GetMissing(), ['missing_builder'])
+
+  def testGetMissingNone(self):
+    """Tests GetMissing returns nothing when all builders are accounted for."""
+    status = {'build1': constants.BUILDER_STATUS_FAILED,
+              'build2': constants.BUILDER_STATUS_INFLIGHT}
+    builders_array = ['build1', 'build2']
+    slaveStatus = manifest_version.SlaveStatus(status, datetime.datetime.now(),
+                                               builders_array, set())
+
+    self.assertEqual(slaveStatus.GetMissing(), [])
+
+  def testGetCompleted(self):
+    """Tests GetCompleted returns the right builders that have completed."""
+    status = {'passed': constants.BUILDER_STATUS_PASSED,
+              'failed': constants.BUILDER_STATUS_FAILED,
+              'aborted': constants.BUILDER_STATUS_ABORTED,
+              'skipped': constants.BUILDER_STATUS_SKIPPED,
+              'forgiven': constants.BUILDER_STATUS_FORGIVEN,
+              'inflight': constants.BUILDER_STATUS_INFLIGHT,
+              'missing': constants.BUILDER_STATUS_MISSING,
+              'planned': constants.BUILDER_STATUS_PLANNED}
+    builders_array = ['passed', 'failed', 'aborted', 'skipped', 'forgiven',
+                      'inflight', 'missing', 'planning']
+    previous_completed = set(['passed'])
+    expected_completed = set(['passed', 'failed', 'aborted', 'skipped',
+                              'forgiven'])
+    slaveStatus = manifest_version.SlaveStatus(status,
+                                               datetime.datetime.now(),
+                                               builders_array,
+                                               previous_completed)
+
+    self.assertEqual(sorted(slaveStatus.GetCompleted()),
+                     sorted(['passed', 'failed', 'aborted', 'skipped',
+                             'forgiven']))
+    self.assertEqual(slaveStatus.previous_completed, expected_completed)
+
+  def testCompleted(self):
+    """Tests Completed returns proper bool."""
+    statusNotCompleted = {'build1': constants.BUILDER_STATUS_FAILED,
+                          'build2': constants.BUILDER_STATUS_INFLIGHT}
+    statusCompleted = {'build1': constants.BUILDER_STATUS_FAILED,
+                       'build2': constants.BUILDER_STATUS_PASSED}
+    builders_array = ['build1', 'build2']
+    slaveStatusNotCompleted = manifest_version.SlaveStatus(
+        statusNotCompleted, datetime.datetime.now(), builders_array, set())
+    slaveStatusCompleted = manifest_version.SlaveStatus(
+        statusCompleted, datetime.datetime.now(), builders_array, set())
+
+    self.assertFalse(slaveStatusNotCompleted.Completed())
+    self.assertTrue(slaveStatusCompleted.Completed())
+
+  def testShouldFailForBuilderStartTimeoutTrue(self):
+    """Tests that ShouldFailForBuilderStartTimeout says fail when it should."""
+    status = {'build1': constants.BUILDER_STATUS_FAILED}
+    start_time = datetime.datetime.now()
+    builders_array = ['build1', 'build2']
+    slaveStatus = manifest_version.SlaveStatus(status, start_time,
+                                               builders_array, set())
+    check_time = start_time + datetime.timedelta(
+        minutes=slaveStatus.BUILDER_START_TIMEOUT + 1)
+
+    self.assertTrue(slaveStatus.ShouldFailForBuilderStartTimeout(check_time))
+
+  def testShouldFailForBuilderStartTimeoutFalseTooEarly(self):
+    """Tests that ShouldFailForBuilderStartTimeout doesn't fail.
+
+    Make sure that we don't fail if there are missing builders but we're
+    checking before the timeout and the other builders have completed.
+    """
+    status = {'build1': constants.BUILDER_STATUS_FAILED}
+    start_time = datetime.datetime.now()
+    builders_array = ['build1', 'build2']
+    slaveStatus = manifest_version.SlaveStatus(status, start_time,
+                                               builders_array, set())
+
+    self.assertFalse(slaveStatus.ShouldFailForBuilderStartTimeout(start_time))
+
+  def testShouldFailForBuilderStartTimeoutFalseNotCompleted(self):
+    """Tests that ShouldFailForBuilderStartTimeout doesn't fail.
+
+    Make sure that we don't fail if there are missing builders and we're
+    checking after the timeout but the other builders haven't completed.
+    """
+    status = {'build1': constants.BUILDER_STATUS_INFLIGHT}
+    start_time = datetime.datetime.now()
+    builders_array = ['build1', 'build2']
+    slaveStatus = manifest_version.SlaveStatus(status, start_time,
+                                               builders_array, set())
+    check_time = start_time + datetime.timedelta(
+        minutes=slaveStatus.BUILDER_START_TIMEOUT + 1)
+
+    self.assertFalse(slaveStatus.ShouldFailForBuilderStartTimeout(check_time))
+
+  def testShouldWaitAllBuildersCompleted(self):
+    """Tests that ShouldWait says no waiting because all builders finished."""
+    status = {'build1': constants.BUILDER_STATUS_FAILED,
+              'build2': constants.BUILDER_STATUS_PASSED}
+    builders_array = ['build1', 'build2']
+    slaveStatus = manifest_version.SlaveStatus(status, datetime.datetime.now(),
+                                               builders_array, set())
+
+    self.assertFalse(slaveStatus.ShouldWait())
+
+  def testShouldWaitMissingBuilder(self):
+    """Tests that ShouldWait says no waiting because a builder is missing."""
+    status = {'build1': constants.BUILDER_STATUS_FAILED}
+    builders_array = ['build1', 'build2']
+    start_time = datetime.datetime.now() - datetime.timedelta(hours=1)
+    slaveStatus = manifest_version.SlaveStatus(status, start_time,
+                                               builders_array, set())
+
+    self.assertFalse(slaveStatus.ShouldWait())
+
+  def testShouldWaitBuildersStillBuilding(self):
+    """Tests that ShouldWait says to wait because builders still building."""
+    status = {'build1': constants.BUILDER_STATUS_INFLIGHT,
+              'build2': constants.BUILDER_STATUS_FAILED}
+    builders_array = ['build1', 'build2']
+    slaveStatus = manifest_version.SlaveStatus(status, datetime.datetime.now(),
+                                               builders_array, set())
+
+    self.assertTrue(slaveStatus.ShouldWait())
diff --git a/lib/brick_lib_unittest b/cbuildbot/results_lib_unittest
similarity index 100%
rename from lib/brick_lib_unittest
rename to cbuildbot/results_lib_unittest
diff --git a/cbuildbot/stages/stage_results_unittest.py b/cbuildbot/results_lib_unittest.py
similarity index 100%
rename from cbuildbot/stages/stage_results_unittest.py
rename to cbuildbot/results_lib_unittest.py
diff --git a/cbuildbot/run_tests.py b/cbuildbot/run_tests.py
index 2c17905..0d0ae28 100644
--- a/cbuildbot/run_tests.py
+++ b/cbuildbot/run_tests.py
@@ -13,6 +13,7 @@
 from __future__ import print_function
 
 import errno
+import json
 import multiprocessing
 import os
 import signal
@@ -28,6 +29,7 @@
 from chromite.lib import gs
 from chromite.lib import namespaces
 from chromite.lib import osutils
+from chromite.lib import path_util
 from chromite.lib import proctitle
 from chromite.lib import timeout_util
 
@@ -43,6 +45,12 @@
 CTRL_C_TIMEOUT = SIGINT_TIMEOUT + 5
 
 
+# The cache file holds various timing information.  This is used later on to
+# optimistically sort tests so the slowest ones run first.  That way we don't
+# wait until all of the fast ones finish before we launch the slow ones.
+TIMING_CACHE_FILE = None
+
+
 # Test has to run inside the chroot.
 INSIDE = 'inside'
 
@@ -60,11 +68,13 @@
     'cli/cros/cros_build_unittest': INSIDE,
     'cli/cros/cros_chroot_unittest': INSIDE,
     'cli/cros/cros_debug_unittest': INSIDE,
+    'cli/cros/cros_payload_unittest': INSIDE,
     'cli/cros/lint_unittest': INSIDE,
     'cli/deploy_unittest': INSIDE,
     'lib/alerts_unittest': INSIDE,
     'lib/chroot_util_unittest': INSIDE,
     'lib/filetype_unittest': INSIDE,
+    'lib/paygen/paygen_payload_lib_unittest': INSIDE,
     'lib/upgrade_table_unittest': INSIDE,
     'mobmonitor/checkfile/manager_unittest': INSIDE,
     'mobmonitor/scripts/mobmonitor_unittest': INSIDE,
@@ -127,6 +137,17 @@
         msg = 'Finished'
       func('%s [%i/%i] %s (%s)', msg, finished.value, total, test, delta)
 
+      # Save the timing for this test run for future usage.
+      seconds = delta.total_seconds()
+      try:
+        cache = json.load(open(TIMING_CACHE_FILE))
+      except (IOError, ValueError):
+        cache = {}
+      if test in cache:
+        seconds = (cache[test] + seconds) / 2
+      cache[test] = seconds
+      json.dump(cache, open(TIMING_CACHE_FILE, 'w'))
+
   ret = cros_build_lib.TimedCommand(
       cros_build_lib.RunCommand, cmd, capture_output=True, error_code_ok=True,
       combine_stdout_stderr=True, debug_level=logging.DEBUG,
@@ -140,7 +161,69 @@
   return ret.returncode
 
 
-def BuildTestSets(tests, chroot_available, network):
+def SortTests(tests, jobs=1, timing_cache_file=None):
+  """Interleave the slowest & fastest
+
+  Hopefully we can pipeline the overall process better by queueing the slowest
+  tests first while also using half the slots for fast tests.  We don't need
+  the timing info to be exact, just ballpark.
+
+  Args:
+    tests: The list of tests to sort.
+    jobs: How many jobs will we run in parallel.
+    timing_cache_file: Where to read test timing info.
+
+  Returns:
+    The tests ordered for best execution timing (we hope).
+  """
+  if timing_cache_file is None:
+    timing_cache_file = TIMING_CACHE_FILE
+
+  # Usually |tests| will be a generator -- break it down.
+  tests = list(tests)
+
+  # If we have enough spare cpus to crunch the jobs, just do so.
+  if len(tests) <= jobs:
+    return tests
+
+  # Create a dict mapping tests to their timing information using the cache.
+  try:
+    with cros_build_lib.Open(timing_cache_file) as f:
+      cache = json.load(f)
+  except (IOError, ValueError):
+    cache = {}
+
+  # Sort the cached list of tests from slowest to fastest.
+  sorted_tests = [test for (test, _timing) in
+                  sorted(cache.iteritems(), key=lambda x: x[1], reverse=True)]
+  # Then extract the tests from the cache list that we care about -- remember
+  # that the cache could be stale and contain tests that no longer exist, or
+  # the user only wants to run a subset of tests.
+  ret = []
+  for test in sorted_tests:
+    if test in tests:
+      ret.append(test)
+      tests.remove(test)
+  # Any tests not in the cache we just throw on the end.  No real way to
+  # predict their speed ahead of time, and we'll get useful data when they
+  # run the test a second time.
+  ret += tests
+
+  # Now interleave the fast & slow tests so every other one mixes.
+  # On systems with fewer cores, this can help out in two ways:
+  # (1) Better utilization of resources when some slow tests are I/O or time
+  #     bound, so the other cores can spawn/fork fast tests faster (generally).
+  # (2) If there is common code that is broken, we get quicker feedback if we
+  #     churn through the fast tests.
+  # Worse case, this interleaving doesn't slow things down overall.
+  fast = ret[:int(round(len(ret) / 2.0)) - 1:-1]
+  slow = ret[:-len(fast)]
+  ret[::2] = slow
+  ret[1::2] = fast
+  return ret
+
+
+def BuildTestSets(tests, chroot_available, network, jobs=1):
   """Build the tests to execute.
 
   Take care of special test handling like whether it needs to be inside or
@@ -150,12 +233,13 @@
     tests: List of tests to execute.
     chroot_available: Whether we can execute tests inside the sdk.
     network: Whether to execute network tests.
+    jobs: How many jobs will we run in parallel.
 
   Returns:
     List of tests to execute and their full command line.
   """
   testsets = []
-  for test in tests:
+  for test in SortTests(tests, jobs=jobs):
     cmd = [test]
 
     # See if this test requires special consideration.
@@ -223,7 +307,7 @@
   # Launch all the tests!
   try:
     # Build up the testsets.
-    testsets = BuildTestSets(tests, chroot_available, network)
+    testsets = BuildTestSets(tests, chroot_available, network, jobs=jobs)
 
     # Fork each test and add it to the list.
     for test, cmd, tmpfile in testsets:
@@ -435,6 +519,10 @@
   if opts.quick:
     SPECIAL_TESTS.update(SLOW_TESTS)
 
+  global TIMING_CACHE_FILE  # pylint: disable=global-statement
+  TIMING_CACHE_FILE = os.path.join(
+      path_util.GetCacheDir(), constants.COMMON_CACHE, 'run_tests.cache.json')
+
   jobs = opts.jobs or multiprocessing.cpu_count()
 
   with cros_build_lib.ContextManagerStack() as stack:
diff --git a/cbuildbot/run_tests_unittest.py b/cbuildbot/run_tests_unittest.py
index 4432999..1323909 100644
--- a/cbuildbot/run_tests_unittest.py
+++ b/cbuildbot/run_tests_unittest.py
@@ -56,6 +56,38 @@
     self.assertEqual(list(found), [])
 
 
+class SortTest(cros_test_lib.TempDirTestCase):
+  """Tests for the SortTests() func"""
+
+  def SortTests(self, tests, **kwargs):
+    """Helper to set cache file to a local temp one"""
+    kwargs['timing_cache_file'] = os.path.join(self.tempdir, 'cache.json')
+    return run_tests.SortTests(tests, **kwargs)
+
+  def testEmpty(self):
+    """Verify handling of empty test lists"""
+    self.SortTests([])
+    self.SortTests([], jobs=100)
+
+  def testSmallSet(self):
+    """Do nothing when number of tests is lower than number of jobs."""
+    tests = ['small', 'test', 'list', 'is', 'ignored']
+    ret = self.SortTests(tests, jobs=100)
+    self.assertEqual(tests, ret)
+
+  def testOddSet(self):
+    """Verify we can sort odd number of tests."""
+    tests = ['1', '2', '3']
+    ret = self.SortTests(tests, jobs=1)
+    self.assertEqual(set(tests), set(ret))
+
+  def testEvenSet(self):
+    """Verify we can sort even number of tests."""
+    tests = ['1', '2', '3', '4']
+    ret = self.SortTests(tests, jobs=1)
+    self.assertEqual(set(tests), set(ret))
+
+
 class MainTest(cros_test_lib.MockOutputTestCase):
   """Tests for the main() func"""
 
diff --git a/cbuildbot/stages/artifact_stages.py b/cbuildbot/stages/artifact_stages.py
index 0c5803f..e3f4915 100644
--- a/cbuildbot/stages/artifact_stages.py
+++ b/cbuildbot/stages/artifact_stages.py
@@ -179,7 +179,6 @@
     #             \- ArchiveStandaloneArtifact
     #          \- ArchiveZipFiles
     #          \- ArchiveHWQual
-    #          \- ArchiveGceTarballs
     #       \- PushImage (blocks on BuildAndArchiveAllImages)
     #    \- ArchiveManifest
     #    \- ArchiveStrippedPackages
@@ -237,30 +236,6 @@
         parallel.RunTasksInProcessPool(ArchiveStandaloneArtifact,
                                        [[x] for x in self.artifacts])
 
-    def ArchiveGceTarballs():
-      """Creates .tar.gz files that can be converted to GCE images.
-
-      These files will be uploaded to GCS buckets, where they can be
-      used as input to the "gcloud compute images create" command.
-      This will convert them into images that can be used to create
-      GCE VM instances.
-      """
-      image_bins = []
-      if 'base' in config['images']:
-        image_bins.append(constants.IMAGE_TYPE_TO_NAME['base'])
-      if 'test' in config['images']:
-        image_bins.append(constants.IMAGE_TYPE_TO_NAME['test'])
-
-      for image_bin in image_bins:
-        if not os.path.exists(os.path.join(image_dir, image_bin)):
-          logging.warning('Missing image file skipped: %s', image_bin)
-          continue
-        output_file = commands.BuildGceTarball(
-            archive_path, image_dir, image_bin)
-        self._release_upload_queue.put([output_file])
-
-      self.board_runattrs.SetParallel('gce_tarball_generated', True)
-
     def ArchiveZipFiles():
       """Build and archive zip files.
 
@@ -315,12 +290,16 @@
       # For recovery image to be generated correctly, BuildRecoveryImage must
       # run before BuildAndArchiveFactoryImages.
       if 'recovery' in config.images:
-        assert self.IsArchivedFile(constants.BASE_IMAGE_BIN)
+        assert os.path.isfile(os.path.join(image_dir, constants.BASE_IMAGE_BIN))
         commands.BuildRecoveryImage(buildroot, board, image_dir, extra_env)
         self._recovery_image_status_queue.put(True)
-        # Re-generate the artifacts list so we include the newly created
-        # recovery image.
-        self.LoadArtifactsList(self._current_board, image_dir)
+        recovery_image = constants.RECOVERY_IMAGE_BIN
+        if not self.IsArchivedFile(recovery_image):
+          info = {'paths': [recovery_image],
+                  'input': [recovery_image],
+                  'archive': 'tar',
+                  'compress': 'xz'}
+          self.artifacts.append(info)
       else:
         self._recovery_image_status_queue.put(False)
 
@@ -331,8 +310,6 @@
             ArchiveStandaloneArtifacts,
             ArchiveZipFiles,
         ]
-        if config['upload_gce_images']:
-          steps.append(ArchiveGceTarballs)
         parallel.RunParallelSteps(steps)
 
     def ArchiveImageScripts():
@@ -358,10 +335,8 @@
       # TODO: When we support branches fully, the friendly name of the branch
       # needs to be used with PushImages
       sign_types = []
-      if config['name'].endswith('-%s' % config_lib.CONFIG_TYPE_FIRMWARE):
-        sign_types += ['firmware']
-      if config['name'].endswith('-%s' % config_lib.CONFIG_TYPE_FACTORY):
-        sign_types += ['factory']
+      if config['sign_types']:
+        sign_types = config['sign_types']
       urls = commands.PushImages(
           board=board,
           archive_url=upload_url,
diff --git a/cbuildbot/stages/build_stages.py b/cbuildbot/stages/build_stages.py
index 44b721e..9533887 100644
--- a/cbuildbot/stages/build_stages.py
+++ b/cbuildbot/stages/build_stages.py
@@ -205,17 +205,14 @@
           self._build_root, toolchain_boards=[self._current_board],
           usepkg=usepkg_toolchain)
 
-    # Only update the board if we need to do so.
-    chroot_path = os.path.join(self._build_root, constants.DEFAULT_CHROOT_DIR)
-    board_path = os.path.join(chroot_path, 'build', self._current_board)
-    if not os.path.isdir(board_path) or self._run.config.board_replace:
-      usepkg = self._run.config.usepkg_build_packages
-      commands.SetupBoard(
-          self._build_root, board=self._current_board, usepkg=usepkg,
-          chrome_binhost_only=self._run.config.chrome_binhost_only,
-          force=self._run.config.board_replace,
-          extra_env=self._portage_extra_env, chroot_upgrade=False,
-          profile=self._run.options.profile or self._run.config.profile)
+    # Always update the board.
+    usepkg = self._run.config.usepkg_build_packages
+    commands.SetupBoard(
+        self._build_root, board=self._current_board, usepkg=usepkg,
+        chrome_binhost_only=self._run.config.chrome_binhost_only,
+        force=self._run.config.board_replace,
+        extra_env=self._portage_extra_env, chroot_upgrade=False,
+        profile=self._run.options.profile or self._run.config.profile)
 
 
 class BuildPackagesStage(generic_stages.BoardSpecificBuilderStage,
@@ -379,7 +376,8 @@
     self.board_runattrs.SetParallel('images_generated', True)
 
     parallel.RunParallelSteps(
-        [self._BuildVMImage, lambda: self._GenerateAuZip(cbuildbot_image_link)])
+        [self._BuildVMImage, lambda: self._GenerateAuZip(cbuildbot_image_link),
+         self._BuildGceTarballs])
 
   def _BuildVMImage(self):
     if self._run.config.vm_tests and not self._afdo_generate_min:
@@ -395,6 +393,28 @@
                              image_dir,
                              extra_env=self._portage_extra_env)
 
+  def _BuildGceTarballs(self):
+    """Creates .tar.gz files that can be converted to GCE images.
+
+    These files will be used by VMTestStage for tests on GCE. They will also be
+    be uploaded to GCS buckets, where they can be used as input to the "gcloud
+    compute images create" command. This will convert them into images that can
+    be used to create GCE VM instances.
+    """
+    if self._run.config.upload_gce_images:
+      image_bins = []
+      if 'base' in self._run.config['images']:
+        image_bins.append(constants.IMAGE_TYPE_TO_NAME['base'])
+      if 'test' in self._run.config['images']:
+        image_bins.append(constants.IMAGE_TYPE_TO_NAME['test'])
+
+      image_dir = self.GetImageDirSymlink('latest')
+      for image_bin in image_bins:
+        if os.path.exists(os.path.join(image_dir, image_bin)):
+          commands.BuildGceTarball(image_dir, image_dir, image_bin)
+        else:
+          logging.warning('Missing image file skipped: %s', image_bin)
+
   def _HandleStageException(self, exc_info):
     """Tell other stages to not wait on us if we die for some reason."""
     self.board_runattrs.SetParallelDefault('images_generated', False)
diff --git a/cbuildbot/stages/build_stages_unittest.py b/cbuildbot/stages/build_stages_unittest.py
index 2f1c4a1..ff239b2 100644
--- a/cbuildbot/stages/build_stages_unittest.py
+++ b/cbuildbot/stages/build_stages_unittest.py
@@ -95,7 +95,7 @@
     self._Run(dir_exists)
     self.assertCommandContains(['./update_chroot'])
     cmd = ['./setup_board', '--board=%s' % self._current_board, '--nousepkg']
-    self.assertCommandContains(cmd, expected=not dir_exists)
+    self.assertCommandContains(cmd)
     cmd = ['./setup_board', '--skip_chroot_upgrade']
     self.assertCommandContains(cmd)
 
@@ -118,30 +118,12 @@
                        self._run.options.latest_toolchain)
     self.assertCommandContains(['./update_chroot', '--nousepkg'],
                                expected=update_nousepkg)
-    run_setup_board = not dir_exists or self._run.config.board_replace
-    self.assertCommandContains(['./setup_board'], expected=run_setup_board)
+    self.assertCommandContains(['./setup_board'])
     cmd = ['./setup_board', '--skip_chroot_upgrade']
-    self.assertCommandContains(cmd, expected=run_setup_board)
+    self.assertCommandContains(cmd)
     cmd = ['./setup_board', '--nousepkg']
     self.assertCommandContains(
-        cmd,
-        expected=run_setup_board and not self._run.config.usepkg_build_packages)
-
-  def testBinBuildWithBoard(self):
-    """Tests whether we don't create the board when it's there."""
-    self._PrepareBin()
-    self._RunBin(dir_exists=True)
-
-  def testBinBuildWithBoardReplace(self):
-    """Tests whether we don't create the board when it's there."""
-    self._PrepareBin()
-    self._run.config.board_replace = True
-    self._RunBin(dir_exists=True)
-
-  def testBinBuildWithMissingBoard(self):
-    """Tests whether we create the board when it's missing."""
-    self._PrepareBin()
-    self._RunBin(dir_exists=False)
+        cmd, not self._run.config.usepkg_build_packages)
 
   def testBinBuildWithLatestToolchain(self):
     """Tests whether we use --nousepkg for creating the board."""
diff --git a/cbuildbot/stages/chrome_stages_unittest.py b/cbuildbot/stages/chrome_stages_unittest.py
index b1504a0..48bb16f 100644
--- a/cbuildbot/stages/chrome_stages_unittest.py
+++ b/cbuildbot/stages/chrome_stages_unittest.py
@@ -112,7 +112,7 @@
                           cros_build_lib_unittest.RunCommandTestCase):
   """Tests for SyncChromeStage."""
 
-  # pylint: disable-msg=protected-access
+  # pylint: disable=protected-access
   def setUp(self):
     self._Prepare()
     self.PatchObject(cbuildbot_run._BuilderRunBase, 'DetermineChromeVersion',
@@ -126,4 +126,3 @@
     """Basic syntax sanity test."""
     stage = self.ConstructStage()
     stage.PerformStage()
-
diff --git a/cbuildbot/stages/completion_stages.py b/cbuildbot/stages/completion_stages.py
index 2c6b08d..e1467d3 100644
--- a/cbuildbot/stages/completion_stages.py
+++ b/cbuildbot/stages/completion_stages.py
@@ -18,9 +18,7 @@
 from chromite.cbuildbot.stages import sync_stages
 from chromite.lib import clactions
 from chromite.lib import cros_logging as logging
-from chromite.lib import git
 from chromite.lib import patch as cros_patch
-from chromite.lib import portage_util
 
 
 def GetBuilderSuccessMap(builder_run, overall_success):
@@ -182,8 +180,8 @@
         timeout = 3 * 60
 
       manager = self._run.attrs.manifest_manager
-      if sync_stages.MasterSlaveLKGMSyncStage.sub_manager:
-        manager = sync_stages.MasterSlaveLKGMSyncStage.sub_manager
+      if sync_stages.MasterSlaveLKGMSyncStage.external_manager:
+        manager = sync_stages.MasterSlaveLKGMSyncStage.external_manager
       slave_statuses.update(manager.GetBuildersStatus(
           self._run.attrs.metadata.GetValue('build_id'),
           builder_names,
@@ -223,8 +221,8 @@
         self._run.manifest_branch == 'master' and
         self._run.config.build_type != constants.CHROME_PFQ_TYPE):
       self._run.attrs.manifest_manager.PromoteCandidate()
-      if sync_stages.MasterSlaveLKGMSyncStage.sub_manager:
-        sync_stages.MasterSlaveLKGMSyncStage.sub_manager.PromoteCandidate()
+      if sync_stages.MasterSlaveLKGMSyncStage.external_manager:
+        sync_stages.MasterSlaveLKGMSyncStage.external_manager.PromoteCandidate()
 
   def HandleFailure(self, failing, inflight, no_stat):
     """Handle a build failure.
@@ -485,11 +483,6 @@
   def HandleSuccess(self):
     if self._run.config.master:
       self.sync_stage.pool.SubmitPool(reason=constants.STRATEGY_CQ_SUCCESS)
-      # After submitting the pool, update the commit hashes for uprevved
-      # ebuilds.
-      manifest = git.ManifestCheckout.Cached(self._build_root)
-      portage_util.EBuild.UpdateCommitHashesForChanges(
-          self.sync_stage.pool.changes, self._build_root, manifest)
       if config_lib.IsPFQType(self._run.config.build_type):
         super(CommitQueueCompletionStage, self).HandleSuccess()
 
@@ -830,10 +823,17 @@
     overlays, push_overlays = self._ExtractOverlays()
     assert push_overlays, 'push_overlays must be set to run this stage'
 
-    # If the build failed, we don't want to push our local changes, because
-    # they might include some CLs that failed. Instead, clean up our local
-    # changes and do a fresh uprev.
-    if not self.success:
+    # If we're a commit queue, we should clean out our local changes, resync,
+    # and reapply our uprevs. This is necessary so that 1) we are sure to point
+    # at the remote SHA1s, not our local SHA1s; 2) we can avoid doing a
+    # rebase; 3) in the case of failure, we don't submit the changes that were
+    # committed locally.
+    #
+    # If we're not a commit queue and the build succeeded, we can skip the
+    # cleanup here. This is a cheap trick so that the Chrome PFQ pushes its
+    # earlier uprev from the SyncChrome stage (it would be a bit tricky to
+    # replicate the uprev here, so we'll leave it alone).
+    if config_lib.IsCQType(self._run.config.build_type) or not self.success:
       # Clean up our root and sync down the latest changes that were
       # submitted.
       commands.BuildRootGitCleanup(self._build_root)
diff --git a/cbuildbot/stages/report_stages.py b/cbuildbot/stages/report_stages.py
index c89a38f..badf6c2 100644
--- a/cbuildbot/stages/report_stages.py
+++ b/cbuildbot/stages/report_stages.py
@@ -47,7 +47,7 @@
 
   In particular, this method does not write any metadata values that depend
   on the builder config, as the config may be modified by patches that are
-  applied before the final reexectuion.
+  applied before the final reexecution. (exception: the config's name itself)
 
   This method is safe to run more than once (for instance, once per cbuildbot
   execution) because it will write the same data each time.
@@ -135,6 +135,15 @@
                                 self._run.config['doc'])
 
     WriteBasicMetadata(self._run)
+
+    # This is a heuristic value for |important|, since patches that get applied
+    # later in the build might change the config. We write it now anyway,
+    # because in case the build fails before Sync, it is better to have this
+    # heuristic value than None. In BuildReexecutionFinishedStage, we re-write
+    # the definitive value.
+    self._run.attrs.metadata.UpdateWithDict(
+        {'important': self._run.config['important']})
+
     d = self._run.attrs.metadata.GetDict()
 
     # BuildStartStage should only run once per build. But just in case it
@@ -165,7 +174,8 @@
             build_config=d['bot-config'],
             bot_hostname=d['bot-hostname'],
             master_build_id=d['master_build_id'],
-            timeout_seconds=self._GetBuildTimeoutSeconds())
+            timeout_seconds=self._GetBuildTimeoutSeconds(),
+            important=d['important'])
         self._run.attrs.metadata.UpdateWithDict({'build_id': build_id,
                                                  'db_type': db_type})
         logging.info('Inserted build_id %s into cidb database type %s.',
@@ -202,6 +212,51 @@
                              '%s.' % (metadata_dict['db_type'], db_type))
 
 
+class SlaveFailureSummaryStage(generic_stages.BuilderStage):
+  """Stage which summarizes and links to the failures of slave builds."""
+
+  @failures_lib.SetFailureType(failures_lib.InfrastructureFailure)
+  def PerformStage(self):
+    if not self._run.config.master:
+      logging.info('This stage is only meaningful for master builds. '
+                   'Doing nothing.')
+      return
+
+    build_id, db = self._run.GetCIDBHandle()
+
+    if not db:
+      logging.info('No cidb connection for this build. '
+                   'Doing nothing.')
+      return
+
+    slave_failures = db.GetSlaveFailures(build_id)
+    failures_by_build = cros_build_lib.GroupByKey(slave_failures, 'build_id')
+    for build_id, build_failures in sorted(failures_by_build.items()):
+      failures_by_stage = cros_build_lib.GroupByKey(build_failures,
+                                                    'build_stage_id')
+      # Surface a link to each slave stage that failed, in stage_id sorted
+      # order.
+      for stage_id in sorted(failures_by_stage):
+        failure = failures_by_stage[stage_id][0]
+        # Ignore failures that did not cause their enclosing stage to fail.
+        # Ignore slave builds that are still inflight, because some stage logs
+        # might not have been printed to buildbot yet.
+        # TODO(akeshet) revisit this approach, if we seem to be suppressing
+        # useful information as a result of it.
+        if (failure['stage_status'] != constants.BUILDER_STATUS_FAILED or
+            failure['build_status'] == constants.BUILDER_STATUS_INFLIGHT):
+          continue
+        waterfall_url = constants.WATERFALL_TO_DASHBOARD[failure['waterfall']]
+        slave_stage_url = tree_status.ConstructDashboardURL(
+            waterfall_url,
+            failure['builder_name'],
+            failure['build_number'],
+            failure['stage_name'])
+        logging.PrintBuildbotLink('%s %s' % (failure['build_config'],
+                                             failure['stage_name']),
+                                  slave_stage_url)
+
+
 class BuildReexecutionFinishedStage(generic_stages.BuilderStage,
                                     generic_stages.ArchivingStageMixin):
   """The first stage to run after the final cbuildbot reexecution.
@@ -271,6 +326,7 @@
         'boards': config['boards'],
         'child-configs': child_configs,
         'build_type': config['build_type'],
+        'important': config['important'],
 
         # Data for the toolchain used.
         'sdk-version': sdk_verinfo.get('SDK_LATEST_VERSION', '<unknown>'),
@@ -438,17 +494,6 @@
       tree_status.SendHealthAlert(self._run, title, '\n\n'.join(body),
                                   extra_fields=extra_fields)
 
-  def _UploadMetadataForRun(self, final_status):
-    """Upload metadata.json for this entire run.
-
-    Args:
-      final_status: Final status string for this run.
-    """
-    self._run.attrs.metadata.UpdateWithDict(
-        self.GetReportMetadata(final_status=final_status,
-                               completion_instance=self._completion_instance))
-    self.UploadMetadata()
-
   def _UploadArchiveIndex(self, builder_run):
     """Upload an HTML index for the artifacts at remote archive location.
 
@@ -559,7 +604,7 @@
 
     # Upload metadata, and update the pass/fail streak counter for the main
     # run only. These aren't needed for the child builder runs.
-    self._UploadMetadataForRun(final_status)
+    self.UploadMetadata()
     self._UpdateRunStreak(self._run, final_status)
 
     # Alert if the Pre-CQ has infra failures.
@@ -615,6 +660,11 @@
       # ArchiveResults() depends the existence of this attr.
       self._run.attrs.release_tag = None
 
+    # Set up our report metadata.
+    self._run.attrs.metadata.UpdateWithDict(
+        self.GetReportMetadata(final_status=final_status,
+                               completion_instance=self._completion_instance))
+
     # Some operations can only be performed if a valid version is available.
     try:
       self._run.GetVersionInfo()
@@ -626,7 +676,6 @@
       archive_urls = ''
       metadata_url = ''
 
-
     results_lib.Results.Report(
         sys.stdout, archive_urls=archive_urls,
         current_version=(self._run.attrs.release_tag or ''))
diff --git a/cbuildbot/stages/report_stages_unittest.py b/cbuildbot/stages/report_stages_unittest.py
index c6a4609..afbeda7 100644
--- a/cbuildbot/stages/report_stages_unittest.py
+++ b/cbuildbot/stages/report_stages_unittest.py
@@ -23,6 +23,7 @@
 from chromite.lib import alerts
 from chromite.lib import cidb
 from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
 from chromite.lib import fake_cidb
 from chromite.lib import gs_unittest
 from chromite.lib import osutils
@@ -83,6 +84,43 @@
   def ConstructStage(self):
     return report_stages.BuildReexecutionFinishedStage(self._run)
 
+
+class SlaveFailureSummaryStageTest(
+    generic_stages_unittest.AbstractStageTestCase):
+  """Tests that SlaveFailureSummaryStage behaves as expected."""
+
+  def setUp(self):
+    self.db = mock.MagicMock()
+    cidb.CIDBConnectionFactory.SetupMockCidb(self.db)
+    self._Prepare(build_id=1)
+
+  def _Prepare(self, **kwargs):
+    """Prepare stage with config['master']=True."""
+    super(SlaveFailureSummaryStageTest, self)._Prepare(**kwargs)
+    self._run.config['master'] = True
+
+  def ConstructStage(self):
+    return report_stages.SlaveFailureSummaryStage(self._run)
+
+  def testPerformStage(self):
+    """Tests that stage runs without syntax errors."""
+    fake_failure = {
+        'build_id': 10,
+        'build_stage_id': 11,
+        'waterfall': constants.WATERFALL_EXTERNAL,
+        'builder_name': 'builder_name',
+        'build_number': 12,
+        'build_config': 'build-config',
+        'stage_name': 'FailingStage',
+        'stage_status': constants.BUILDER_STATUS_FAILED,
+        'build_status': constants.BUILDER_STATUS_FAILED,
+        }
+    self.PatchObject(self.db, 'GetSlaveFailures', return_value=[fake_failure])
+    self.PatchObject(logging, 'PrintBuildbotLink')
+    self.RunStage()
+    self.assertEqual(logging.PrintBuildbotLink.call_count, 1)
+
+
 class BuildStartStageTest(generic_stages_unittest.AbstractStageTestCase):
   """Tests that BuildStartStage behaves as expected."""
 
diff --git a/cbuildbot/stages/sdk_stages_unittest.py b/cbuildbot/stages/sdk_stages_unittest.py
index 96e3d11..c933b86 100644
--- a/cbuildbot/stages/sdk_stages_unittest.py
+++ b/cbuildbot/stages/sdk_stages_unittest.py
@@ -8,6 +8,7 @@
 
 import json
 import os
+import unittest
 
 from chromite.cbuildbot import commands
 from chromite.cbuildbot import constants
@@ -221,6 +222,8 @@
   def ConstructStage(self):
     return sdk_stages.SDKPackageToolchainOverlaysStage(self._run)
 
+  # TODO(akeshet): determine why this test is flaky
+  @unittest.skip("Skip flaky test.")
   def testTarballCreation(self):
     """Tests that tarballs are created for all board toolchains."""
     self._Prepare('chromiumos-sdk')
diff --git a/cbuildbot/stages/stage_results_unittest b/cbuildbot/stages/stage_results_unittest
deleted file mode 120000
index ef3e37b..0000000
--- a/cbuildbot/stages/stage_results_unittest
+++ /dev/null
@@ -1 +0,0 @@
-../../scripts/wrapper.py
\ No newline at end of file
diff --git a/cbuildbot/stages/sync_stages.py b/cbuildbot/stages/sync_stages.py
index dc2df54..8eb6ce7 100644
--- a/cbuildbot/stages/sync_stages.py
+++ b/cbuildbot/stages/sync_stages.py
@@ -733,10 +733,9 @@
   This stage uses an LKGM manifest manager that handles LKGM
   candidates and their states.
   """
-
-  # TODO(mtennant): Turn this into self._run.attrs.sub_manager or similar.
-  # An instance of lkgm_manager.LKGMManager for slave builds.
-  sub_manager = None
+  # If we are using an internal manifest, but need to be able to create an
+  # external manifest, we create a second manager for that manifest.
+  external_manager = None
   MAX_BUILD_HISTORY_LENGTH = 10
   MilestoneVersion = collections.namedtuple(
       'MilestoneVersion', ['milestone', 'platform'])
@@ -775,12 +774,13 @@
     self.RegisterManifestManager(self._GetInitializedManager(self.internal))
     if self._run.config.master and self._GetSlaveConfigs():
       assert self.internal, 'Unified masters must use an internal checkout.'
-      MasterSlaveLKGMSyncStage.sub_manager = self._GetInitializedManager(False)
+      MasterSlaveLKGMSyncStage.external_manager = \
+          self._GetInitializedManager(False)
 
   def ForceVersion(self, version):
     manifest = super(MasterSlaveLKGMSyncStage, self).ForceVersion(version)
-    if MasterSlaveLKGMSyncStage.sub_manager:
-      MasterSlaveLKGMSyncStage.sub_manager.BootstrapFromVersion(version)
+    if MasterSlaveLKGMSyncStage.external_manager:
+      MasterSlaveLKGMSyncStage.external_manager.BootstrapFromVersion(version)
 
     return manifest
 
@@ -806,8 +806,8 @@
     manifest = self.manifest_manager.CreateNewCandidate(
         chrome_version=self._chrome_version,
         build_id=build_id)
-    if MasterSlaveLKGMSyncStage.sub_manager:
-      MasterSlaveLKGMSyncStage.sub_manager.CreateFromManifest(
+    if MasterSlaveLKGMSyncStage.external_manager:
+      MasterSlaveLKGMSyncStage.external_manager.CreateFromManifest(
           manifest, build_id=build_id)
 
     return manifest
@@ -917,11 +917,14 @@
     changes_to_test = []
 
     _, db = self._run.GetCIDBHandle()
-    actions_for_changes = db.GetActionsForChanges(changes)
-    for change in changes:
-      status = clactions.GetCLPreCQStatus(change, actions_for_changes)
-      if status == constants.CL_STATUS_PASSED:
-        changes_to_test.append(change)
+    if db:
+      actions_for_changes = db.GetActionsForChanges(changes)
+      for change in changes:
+        status = clactions.GetCLPreCQStatus(change, actions_for_changes)
+        if status == constants.CL_STATUS_PASSED:
+          changes_to_test.append(change)
+    else:
+      logging.warning("DB not available, unable to filter for PreCQ passed.")
 
     # Allow Commit-Ready=+2 changes to bypass the Pre-CQ, if there are no other
     # changes.
@@ -978,7 +981,7 @@
       if self._run.options.cq_gerrit_override:
         query = (self._run.options.cq_gerrit_override, None)
 
-      self.pool = pool = validation_pool.ValidationPool.AcquirePool(
+      self.pool = validation_pool.ValidationPool.AcquirePool(
           self._run.config.overlays, self.repo,
           self._run.buildnumber, self._run.GetBuilderName(),
           query,
@@ -1001,10 +1004,10 @@
       db.ExtendDeadline(build_id, timeout)
 
     logging.info('Creating new candidate manifest.')
-    manifest = self.manifest_manager.CreateNewCandidate(validation_pool=pool,
-                                                        build_id=build_id)
-    if MasterSlaveLKGMSyncStage.sub_manager:
-      MasterSlaveLKGMSyncStage.sub_manager.CreateFromManifest(
+    manifest = self.manifest_manager.CreateNewCandidate(
+        validation_pool=self.pool, build_id=build_id)
+    if MasterSlaveLKGMSyncStage.external_manager:
+      MasterSlaveLKGMSyncStage.external_manager.CreateFromManifest(
           manifest, build_id=build_id)
 
     return manifest
diff --git a/cbuildbot/stages/test_stages.py b/cbuildbot/stages/test_stages.py
index 0ee4ad8..cf48d01 100644
--- a/cbuildbot/stages/test_stages.py
+++ b/cbuildbot/stages/test_stages.py
@@ -158,18 +158,6 @@
           prefix = ''
         self.PrintDownloadLink(filename, prefix)
 
-  def _WaitForGceTarball(self, image_path):
-    """Waits until GCE tarball is available."""
-    gce_tar_generated = self.GetParallel('gce_tarball_generated')
-    if not gce_tar_generated:
-      return
-    # Still need to check its availability as artifacts are uploaded in the
-    # background.
-    gs_ctx = gs.GSContext()
-    logging.info('Waiting for GCE tarball to be uploaded at %s.' % image_path)
-    gs_ctx.WaitForGsPaths([image_path], self.CHECK_GCS_TIMEOUT,
-                          self.CHECK_GCS_PERIOD)
-
   def _RunTest(self, test_type, test_results_dir):
     """Run a VM test.
 
@@ -184,11 +172,8 @@
           self._build_root, self._current_board, self.GetImageDirSymlink())
     else:
       if test_type == constants.GCE_VM_TEST_TYPE:
-        # If tests are to run on GCE, use the uploaded tar ball.
-        image_path = ('%s/%s' % (self.download_url.rstrip('/'),
-                                 constants.TEST_IMAGE_GCE_TAR))
-
-        self._WaitForGceTarball(image_path)
+        image_path = os.path.join(self.GetImageDirSymlink(),
+                                  constants.TEST_IMAGE_GCE_TAR)
       else:
         image_path = os.path.join(self.GetImageDirSymlink(),
                                   constants.TEST_IMAGE_BIN)
@@ -203,8 +188,7 @@
       commands.RunTestSuite(self._build_root,
                             self._current_board,
                             image_path,
-                            os.path.join(test_results_dir,
-                                         'test_harness'),
+                            os.path.join(test_results_dir, 'test_harness'),
                             test_type=test_type,
                             whitelist_chrome_crashes=self._chrome_rev is None,
                             archive_dir=self.bot_archive_root,
@@ -306,6 +290,9 @@
     current_board_dict = per_board_dict.get(self._current_board)
     if current_board_dict:
       subsystems = set(current_board_dict.get('subsystems_to_test', []))
+      # 'subsystem:all' indicates to skip the subsystem logic
+      if 'all' in subsystems:
+        subsystems = None
     else:
       subsystems = None
 
@@ -332,7 +319,8 @@
     # Wait for UploadHWTestArtifacts to generate the payloads.
     if not self.GetParallel('delta_payloads_generated',
                             pretty_name='delta payloads'):
-      logging.PrintBuildbotStepWarnings('missing delta payloads')
+      logging.PrintBuildbotStepText('Missing delta payloads.')
+      logging.PrintBuildbotStepWarnings()
       logging.warning('Cannot run HWTest because UploadTestArtifacts failed. '
                       'See UploadTestArtifacts for details.')
       return
diff --git a/cbuildbot/stages/test_stages_unittest.py b/cbuildbot/stages/test_stages_unittest.py
index 02fea21..03fd5e9 100644
--- a/cbuildbot/stages/test_stages_unittest.py
+++ b/cbuildbot/stages/test_stages_unittest.py
@@ -17,14 +17,12 @@
 from chromite.cbuildbot import swarming_lib
 from chromite.cbuildbot import topology
 from chromite.cbuildbot.stages import artifact_stages
-from chromite.cbuildbot.stages import generic_stages
 from chromite.cbuildbot.stages import generic_stages_unittest
 from chromite.cbuildbot.stages import test_stages
 from chromite.lib import cgroups
 from chromite.lib import cros_build_lib_unittest
 from chromite.lib import cros_test_lib
 from chromite.lib import cros_logging as logging
-from chromite.lib import gs
 from chromite.lib import osutils
 from chromite.lib import path_util
 from chromite.lib import timeout_util
@@ -79,32 +77,19 @@
   def testGceTests(self):
     """Tests if GCE_VM_TEST_TYPE tests are run on GCE."""
     self._run.config['vm_tests'] = [constants.GCE_VM_TEST_TYPE]
-    gce_path = constants.TEST_IMAGE_GCE_TAR
-    board_runattrs = self._run.GetBoardRunAttrs(self._current_board)
+    gce_tarball = constants.TEST_IMAGE_GCE_TAR
 
     # pylint: disable=unused-argument
     def _MockRunTestSuite(buildroot, board, image_path, results_dir, test_type,
                           *args, **kwargs):
-      self.assertEndsWith(image_path, gce_path)
+      self.assertEndsWith(image_path, gce_tarball)
       self.assertEqual(test_type, constants.GCE_VM_TEST_TYPE)
     # pylint: enable=unused-argument
 
-    def _MockWaitForGsPaths(_, paths, *_args, **_kwargs):
-      self.assertEndsWith(paths[0], gce_path)
-
-    self.PatchObject(generic_stages.BoardSpecificBuilderStage, 'GetParallel',
-                     autospec=True)
-    self.PatchObject(gs.GSContext, 'WaitForGsPaths',
-                     side_effect=_MockWaitForGsPaths, autospec=True)
     commands.RunTestSuite.side_effect = _MockRunTestSuite
-    board_runattrs.SetParallel('gce_tarball_generated', True)
 
     self.RunStage()
 
-    generic_stages.BoardSpecificBuilderStage.GetParallel.assert_any_call(
-        mock.ANY, 'gce_tarball_generated')
-    self.assertTrue(gs.GSContext.WaitForGsPaths.called and
-                    gs.GSContext.WaitForGsPaths.call_count == 1)
     self.assertTrue(commands.RunTestSuite.called and
                     commands.RunTestSuite.call_count == 1)
 
diff --git a/cbuildbot/swarming_lib.py b/cbuildbot/swarming_lib.py
index 435a6a9..673b11a 100644
--- a/cbuildbot/swarming_lib.py
+++ b/cbuildbot/swarming_lib.py
@@ -11,7 +11,9 @@
 import os
 
 from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
 from chromite.lib import osutils
+from chromite.lib import retry_util
 
 # Location of swarming_client.py that is used to send swarming requests
 _DIR_NAME = os.path.dirname(os.path.abspath(__file__))
@@ -19,6 +21,8 @@
     _DIR_NAME, '..', 'third_party', 'swarming.client', 'swarming.py'))
 CONNECTION_TYPE_COMMON = 'common'
 CONNECTION_TYPE_MOCK = 'mock'
+# Code 80 - bot died.
+RETRIABLE_INTERNAL_FAILURE_STATES = {80}
 
 
 def RunSwarmingCommand(cmd, swarming_server, task_name=None,
@@ -87,6 +91,55 @@
       raise cros_build_lib.RunCommandError(e.msg, result, e.exception)
 
 
+def SwarmingRetriableErrorCheck(exception):
+  """Check if a swarming error is retriable.
+
+  Args:
+    exception: A cros_build_lib.RunCommandError exception.
+
+  Returns:
+    True if retriable, otherwise False.
+  """
+  if not isinstance(exception, cros_build_lib.RunCommandError):
+    return False
+  result = exception.result
+  if not isinstance(result, SwarmingCommandResult):
+    return False
+  if result.task_summary_json:
+    try:
+      internal_failure = result.task_summary_json[
+          'shards'][0]['internal_failure']
+      state = result.task_summary_json['shards'][0]['state']
+      if internal_failure and state in RETRIABLE_INTERNAL_FAILURE_STATES:
+        logging.warning(
+            'Encountered retriable swarming internal failure: %s',
+            json.dumps(result.task_summary_json, indent=2))
+        return True
+    except (IndexError, KeyError) as e:
+      logging.warning(
+          "Could not determine if %s is retriable, error: %s. json: %s",
+          str(exception), str(e),
+          json.dumps(result.task_summary_json, indent=2))
+  return False
+
+
+def RunSwarmingCommandWithRetries(max_retry, *args, **kwargs):
+  """Wrapper for RunSwarmingCommand that will retry a command.
+
+  Args:
+    max_retry: See RetryCommand.
+    *args: See RetryCommand and RunSwarmingCommand.
+    **kwargs: See RetryCommand and RunSwarmingCommand.
+
+  Returns:
+    A SwarmingCommandResult object.
+
+  Raises:
+    RunCommandError: When the command fails.
+  """
+  return retry_util.RetryCommand(RunSwarmingCommand, max_retry, *args, **kwargs)
+
+
 class SwarmingCommandResult(cros_build_lib.CommandResult):
   """An object to store result of a command that is run via swarming.
 
diff --git a/cbuildbot/triage_lib.py b/cbuildbot/triage_lib.py
index 099dc4a..e6cf6f8 100644
--- a/cbuildbot/triage_lib.py
+++ b/cbuildbot/triage_lib.py
@@ -10,6 +10,7 @@
 import glob
 import os
 import pprint
+import re
 
 from chromite.cbuildbot import failures_lib
 from chromite.cbuildbot import constants
@@ -239,11 +240,9 @@
 def GetTestSubsystemForChange(build_root, change):
   """Get a list of subsystem that a given |change| affects.
 
-  The list of the subsystem that a change affacts is specified in a config file
-  inside the project, named COMMIT-QUEUE.ini. The file would look like this:
-
-  [GENERAL]
-    subsystem: power graphics
+  If subsystem is specified in the commit message, use that. Otherwise, look in
+  appropriate COMMIT-QUEUE.ini. If subsystem is not specified anywhere,
+  'subsystem:default' will be used.
 
   Based on the subsystems a given |change| affects, the CQ could tell whether a
   failure is potentially caused by this |change|. The CQ could then submit some
@@ -256,8 +255,16 @@
   Returns:
     A list of subsystem for the given |change|.
   """
-  result = GetOptionForChange(build_root, change, 'GENERAL', 'subsystem')
-  return result.split() if result else []
+  subsystems = []
+  if change.commit_message:
+    lines = cros_patch.GetOptionLinesFromCommitMessage(
+        change.commit_message, 'subsystem:')
+    if lines:
+      subsystems = [x for x in re.split("[, ]", ' '.join(lines)) if x]
+  if not subsystems:
+    result = GetOptionForChange(build_root, change, 'GENERAL', 'subsystem')
+    subsystems = result.split() if result else []
+  return subsystems if subsystems else ['default']
 
 class CategorizeChanges(object):
   """A collection of methods to help categorize GerritPatch changes.
diff --git a/cbuildbot/triage_lib_unittest.py b/cbuildbot/triage_lib_unittest.py
index ab8716c..efc5338 100644
--- a/cbuildbot/triage_lib_unittest.py
+++ b/cbuildbot/triage_lib_unittest.py
@@ -397,6 +397,34 @@
       result = triage_lib.GetOptionForChange(build_root, change, 'a', 'b')
       self.assertEqual(None, result)
 
+  def testGetSubsystemFromValidCommitMessage(self):
+    """Test whether we can get subsystem from commit message."""
+    change = sync_stages_unittest.MockPatch(
+        commit_message='First line\nThird line\nsubsystem: network audio\n'
+                       'subsystem: wifi')
+    self.PatchObject(triage_lib, 'GetOptionForChange',
+                     return_value='power light')
+    result = triage_lib.GetTestSubsystemForChange('foo/build/root', change)
+    self.assertEqual(['network', 'audio', 'wifi'], result)
+
+  def testGetSubsystemFromInvalidCommitMessage(self):
+    """Test get subsystem from config file when commit message not have it."""
+    change = sync_stages_unittest.MockPatch(
+        commit_message='First line\nThird line\n')
+    self.PatchObject(triage_lib, 'GetOptionForChange',
+                     return_value='power light')
+    result = triage_lib.GetTestSubsystemForChange('foo/build/root', change)
+    self.assertEqual(['power', 'light'], result)
+
+  def testGetDefaultSubsystem(self):
+    """Test if we can get default subsystem when subsystem is not specified."""
+    change = sync_stages_unittest.MockPatch(
+        commit_message='First line\nThird line\n')
+    self.PatchObject(triage_lib, 'GetOptionForChange',
+                     return_value=None)
+    result = triage_lib.GetTestSubsystemForChange('foo/build/root', change)
+    self.assertEqual(['default'], result)
+
 
 class ConfigFileTest(cros_test_lib.MockTestCase):
   """Tests for functions that read config information for a patch."""
diff --git a/cbuildbot/update_binhost_json.py b/cbuildbot/update_binhost_json.py
index 9d1625a..8c7ebf1 100644
--- a/cbuildbot/update_binhost_json.py
+++ b/cbuildbot/update_binhost_json.py
@@ -34,7 +34,7 @@
   cros_build_lib.AssertInsideChroot()
   opts = _ParseArguments(argv)
 
-  site_config = config_lib.LoadConfigFromFile()
+  site_config = config_lib.GetConfig()
 
   logging.info('Generating board configs. This takes about 2m...')
   for key in sorted(binhost.GetChromePrebuiltConfigs(site_config)):
diff --git a/cbuildbot/validation_pool.py b/cbuildbot/validation_pool.py
index b6e96e8..15f18e9 100644
--- a/cbuildbot/validation_pool.py
+++ b/cbuildbot/validation_pool.py
@@ -822,7 +822,7 @@
 
   @_ManifestDecorator
   def Apply(self, changes, frozen=True, honor_ordering=False,
-            changes_filter=None, max_change_count=None):
+            changes_filter=None):
     """Applies changes from pool into the build root specified by the manifest.
 
     This method resolves each given change down into a set of transactions-
@@ -855,10 +855,6 @@
         changes being inspected, and expand the changes if necessary.
         Primarily this is of use for cbuildbot patching when dealing w/
         uploaded/remote patches.
-      max_change_count: If not None, this is a soft integer limit on the number
-        of patches to pull in. We stop pulling in patches as soon as we grab
-        at least this many patches. Note that this limit may be exceeded by N-1,
-        where N is the length of the longest transaction.
 
     Returns:
       A tuple of changes-applied, Exceptions for the changes that failed
@@ -885,9 +881,6 @@
                      change, ', '.join(map(str, resolved[-1][-1])))
         planned.update(plan)
 
-      if max_change_count is not None and len(planned) >= max_change_count:
-        break
-
     if not resolved:
       # No work to do; either no changes were given to us, or all failed
       # to be resolved.
@@ -1129,7 +1122,7 @@
   def __init__(self, overlays, build_root, build_number, builder_name,
                is_master, dryrun, changes=None, non_os_changes=None,
                conflicting_changes=None, pre_cq_trybot=False,
-               tree_was_open=True, builder_run=None):
+               tree_was_open=True, _applied=None, builder_run=None):
     """Initializes an instance by setting default variables to instance vars.
 
     Generally use AcquirePool as an entry pool to a pool rather than this
@@ -1151,11 +1144,12 @@
       pre_cq_trybot: If set to True, this is a Pre-CQ trybot. (Note: The Pre-CQ
         launcher is NOT considered a Pre-CQ trybot.)
       tree_was_open: Whether the tree was open when the pool was created.
+      _applied: List of CLs that have been applied to the current repo. Not
+        yet used, but needs to be here for pickling compatibility.
       builder_run: BuilderRun instance used to fetch cidb handle and metadata
         instance. Please note due to the pickling logic, this MUST be the last
         kwarg listed.
     """
-
     self.build_root = build_root
 
     # These instances can be instantiated via both older, or newer pickle
@@ -1622,6 +1616,22 @@
 
       logging.PrintBuildbotLink(s, change.url)
 
+  def FilterChangesForThrottledTree(self):
+    """Apply Throttled Tree logic to select patch candidates.
+
+    If the tree is throttled, we only test a random subset of our candidate
+    changes. Call this to select that subset, and throw away unrelated changes.
+
+    If the tree was open when this pool was created, it does nothing.
+    """
+    if self.tree_was_open:
+      return
+
+    fail_streak = self._GetFailStreak()
+    test_pool_size = max(1, len(self.changes) / (2**fail_streak))
+    random.shuffle(self.changes)
+    self.changes = self.changes[:test_pool_size]
+
   def ApplyPoolIntoRepo(self, manifest=None):
     """Applies changes from pool into the directory specified by the buildroot.
 
@@ -1640,18 +1650,11 @@
     failed_inflight = []
     patch_series = PatchSeries(self.build_root, helper_pool=self._helper_pool)
 
-    # Only try a subset of the changes if the tree was throttled.
-    max_change_count = len(self.changes)
-    if not self.tree_was_open:
-      random.shuffle(self.changes)
-      fail_streak = self._GetFailStreak()
-      max_change_count = max(1, len(self.changes) / (2**fail_streak))
-
     if self.is_master:
       try:
         # pylint: disable=E1123
         applied, failed_tot, failed_inflight = patch_series.Apply(
-            self.changes, manifest=manifest, max_change_count=max_change_count)
+            self.changes, manifest=manifest)
       except (KeyboardInterrupt, RuntimeError, SystemExit):
         raise
       except Exception as e:
diff --git a/cbuildbot/validation_pool_unittest.py b/cbuildbot/validation_pool_unittest.py
index 228c5e4..3fab2d1 100644
--- a/cbuildbot/validation_pool_unittest.py
+++ b/cbuildbot/validation_pool_unittest.py
@@ -16,7 +16,6 @@
 import mox
 import os
 import pickle
-import random
 import tempfile
 import time
 
@@ -878,10 +877,7 @@
   def MakeFailure(self, patch, inflight=True):
     return cros_patch.ApplyPatchException(patch, inflight=inflight)
 
-  def GetPool(self, changes, applied=(), tot=(), inflight=(),
-              max_change_count=None, **kwargs):
-    if not max_change_count:
-      max_change_count = len(changes)
+  def GetPool(self, changes, applied=(), tot=(), inflight=(), **kwargs):
 
     pool = self.MakePool(changes=changes, fake_db=self.fake_db, **kwargs)
     applied = list(applied)
@@ -889,8 +885,7 @@
     inflight = [self.MakeFailure(x, inflight=True) for x in inflight]
     # pylint: disable=E1120,E1123
     validation_pool.PatchSeries.Apply(
-        changes, manifest=mox.IgnoreArg(), max_change_count=max_change_count
-        ).AndReturn((applied, tot, inflight))
+        changes, manifest=mox.IgnoreArg()).AndReturn((applied, tot, inflight))
 
     for patch in applied:
       pool.HandleApplySuccess(patch, mox.IgnoreArg()).AndReturn(None)
@@ -1138,8 +1133,7 @@
 
     # pylint: disable=E1120,E1123
     validation_pool.PatchSeries.Apply(
-        patches, manifest=mox.IgnoreArg(),
-        max_change_count=len(patches)).AndRaise(MyException)
+        patches, manifest=mox.IgnoreArg()).AndRaise(MyException)
     errors = [mox.Func(functools.partial(VerifyCQError, x)) for x in patches]
     pool._HandleApplyFailure(errors).AndReturn(None)
 
@@ -1366,47 +1360,72 @@
 
     self.assertEqual(slave_pool._GetFailStreak(), 0)
 
-  def testApplyWithTreeNotOpen(self):
+  def testFilterChangesForThrottledTree(self):
     """Tests that we can correctly apply exponential fallback."""
     patches = self.GetPatches(4)
-
-    # We mock out the shuffle so that we can deterministically test.
-    self.mox.StubOutWithMock(random, 'shuffle')
     self.mox.StubOutWithMock(validation_pool.ValidationPool, '_GetFailStreak')
 
-    slave_pool = self.GetPool(changes=patches, applied=patches[:2],
-                              max_change_count=2,
-                              tree_was_open=False, handlers=True)
-    random.shuffle(patches) # Mock.
+    #
+    # Test when tree is open.
+    #
+    self.mox.ReplayAll()
+
+    # Perform test.
+    slave_pool = self.MakePool(changes=patches, tree_was_open=True)
+    slave_pool.FilterChangesForThrottledTree()
+
+    # Validate results.
+    self.assertEqual(len(slave_pool.changes), 4)
+    self.mox.VerifyAll()
+    self.mox.ResetAll()
+
+    #
+    # Test when tree is closed with a streak of 1.
+    #
+
     # pylint: disable=no-value-for-parameter
     validation_pool.ValidationPool._GetFailStreak().AndReturn(1)
-
     self.mox.ReplayAll()
-    self.runApply(slave_pool, True)
+
+    # Perform test.
+    slave_pool = self.MakePool(changes=patches, tree_was_open=False)
+    slave_pool.FilterChangesForThrottledTree()
+
+    # Validate results.
     self.assertEqual(len(slave_pool.changes), 2)
     self.mox.VerifyAll()
     self.mox.ResetAll()
 
-    slave_pool = self.GetPool(changes=patches, applied=patches[:1],
-                              max_change_count=1,
-                              tree_was_open=False, handlers=True)
-    random.shuffle(patches) # Mock.
-    validation_pool.ValidationPool._GetFailStreak().AndReturn(2)
+    #
+    # Test when tree is closed with a streak of 2.
+    #
 
+    # pylint: disable=no-value-for-parameter
+    validation_pool.ValidationPool._GetFailStreak().AndReturn(2)
     self.mox.ReplayAll()
-    self.runApply(slave_pool, True)
+
+    # Perform test.
+    slave_pool = self.MakePool(changes=patches, tree_was_open=False)
+    slave_pool.FilterChangesForThrottledTree()
+
+    # Validate results.
     self.assertEqual(len(slave_pool.changes), 1)
     self.mox.VerifyAll()
     self.mox.ResetAll()
 
-    slave_pool = self.GetPool(changes=patches, applied=patches[:1],
-                              max_change_count=1,
-                              tree_was_open=False, handlers=True)
-    random.shuffle(patches) # Mock.
-    validation_pool.ValidationPool._GetFailStreak().AndReturn(10)
+    #
+    # Test when tree is closed with a streak of many.
+    #
 
+    # pylint: disable=no-value-for-parameter
+    validation_pool.ValidationPool._GetFailStreak().AndReturn(200)
     self.mox.ReplayAll()
-    self.runApply(slave_pool, True)
+
+    # Perform test.
+    slave_pool = self.MakePool(changes=patches, tree_was_open=False)
+    slave_pool.FilterChangesForThrottledTree()
+
+    # Validate results.
     self.assertEqual(len(slave_pool.changes), 1)
     self.mox.VerifyAll()
 
diff --git a/cidb/developer.readme b/cidb/developer.readme
index 6cc95b3..d8117d3 100644
--- a/cidb/developer.readme
+++ b/cidb/developer.readme
@@ -12,12 +12,15 @@
 The test logs the path to the temporary working directory at the end.
 
 You can launch the mysqld server again to play with the database in its final
-state. If `tmpdir` is the temporary directory left behind by the test, Inside
-the chroot, run:
+state. You'll have to fish out the temp directory that it created -- it will
+look something like /tmp/chromite.test_no_cleanup3WqzmO/chromite.testYypd_c/
+
+Set your local tmpdir variable to the path that you found, and run
+(inside the chroot):
 
 $ /usr/sbin/mysqld --no-defaults --datadir ${tmpdir}/mysqld_dir --socket \
     ${tmpdir}/mysqld_dir/mysqld.socket --port 8440 --pid-file \
-    ${tmpdir}/mysqld_dir/mysqld.pid --tmpdir ${tmpdir}/mysqld_dir/tmp
+    ${tmpdir}/mysqld_dir/mysqld.pid --tmpdir ${tmpdir}/mysqld_dir/tmp &
 
 You can connect to this instance using mysql client.
 
diff --git a/cidb/migrations/00042_create_build_message_table.sql b/cidb/migrations/00042_create_build_message_table.sql
new file mode 100644
index 0000000..6ab047d
--- /dev/null
+++ b/cidb/migrations/00042_create_build_message_table.sql
@@ -0,0 +1,15 @@
+CREATE TABLE buildMessageTable (
+  id INT NOT NULL AUTO_INCREMENT,
+  build_id INT NOT NULL,
+  message_type VARCHAR(240),
+  message_subtype VARCHAR(240),
+  message_value VARCHAR(480),
+  timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
+  PRIMARY KEY(id),
+  FOREIGN KEY (build_id)
+    REFERENCES buildTable(id)
+);
+
+
+INSERT INTO schemaVersionTable (schemaVersion, scriptName) VALUES
+  (42, '00042_create_build_message_table.sql');
diff --git a/cidb/migrations/00043_alter_build_table_add_important.sql b/cidb/migrations/00043_alter_build_table_add_important.sql
new file mode 100644
index 0000000..da4c3d1
--- /dev/null
+++ b/cidb/migrations/00043_alter_build_table_add_important.sql
@@ -0,0 +1,5 @@
+ALTER TABLE buildTable
+  ADD COLUMN important BOOLEAN DEFAULT NULL;
+
+INSERT INTO schemaVersionTable (schemaVersion, scriptName) VALUES
+  (43, '00043_alter_build_table_add_important.sql');
diff --git a/cidb/migrations/00044_alter_views_add_important.sql b/cidb/migrations/00044_alter_views_add_important.sql
new file mode 100644
index 0000000..eff3284
--- /dev/null
+++ b/cidb/migrations/00044_alter_views_add_important.sql
@@ -0,0 +1,32 @@
+-- The failureView consists of:
+-- all failureTable columns, by original name.
+-- all buildStageTable columns except build_id, and its own id (which come
+-- from f.*), with non-colliding names
+-- all buildTable columns, with non-colliding names
+ALTER VIEW failureView AS
+  SELECT f.*,
+    bs.name AS stage_name, bs.board, bs.status AS stage_status,
+    bs.last_updated AS stage_last_updated, bs.start_time AS stage_start_time,
+    bs.finish_time AS stage_finish_time, bs.final AS stage_final,
+    b.id AS build_id, b.last_updated AS build_last_updated, b.master_build_id,
+    b.buildbot_generation, b.builder_name, b.waterfall, b.build_number,
+    b.build_config, b.bot_hostname, b.start_time AS build_start_time,
+    b.finish_time AS build_finish_time, b.status AS build_status, b.build_type,
+    b.chrome_version, b.milestone_version, b.platform_version, b.full_version,
+    b.sdk_version, b.toolchain_url, b.final AS build_final, b.metadata_url,
+    b.summary, b.deadline, b.important
+  FROM failureTable f JOIN buildStageTable bs on f.build_stage_id = bs.id
+                      JOIN buildTable b on bs.build_id = b.id;
+
+ALTER VIEW clActionView as
+  SELECT c.*,
+    b.last_updated, b.master_build_id, b.buildbot_generation, b.builder_name,
+    b.waterfall, b.build_number, b.build_config, b.bot_hostname, b.start_time,
+    b.finish_time, b.status, b.build_type, b.chrome_version,
+    b.milestone_version, b.platform_version, b.full_version, b.sdk_version,
+    b.toolchain_url, b.final, b.metadata_url, b.summary, b.deadline,
+    b.important
+ FROM clActionTable c JOIN buildTable b on c.build_id = b.id;
+
+INSERT INTO schemaVersionTable (schemaVersion, scriptName) VALUES
+  (44, '00044_alter_views_add_important.sql');
diff --git a/cidb/migrations/00045_alter_build_message_table_add_board.sql b/cidb/migrations/00045_alter_build_message_table_add_board.sql
new file mode 100644
index 0000000..47385e3
--- /dev/null
+++ b/cidb/migrations/00045_alter_build_message_table_add_board.sql
@@ -0,0 +1,6 @@
+ALTER TABLE buildMessageTable
+  ADD COLUMN board VARCHAR(240) DEFAULT NULL;
+
+INSERT INTO schemaVersionTable (schemaVersion, scriptName) VALUES
+  (45, '00045_alter_build_message_table_add_board.sql');
+
diff --git a/cidb/schema.dump b/cidb/schema.dump
index 598bb3e..7ad64ab 100644
--- a/cidb/schema.dump
+++ b/cidb/schema.dump
@@ -50,6 +50,24 @@
 /*!40101 SET character_set_client = @saved_cs_client */;
 
 
+DROP TABLE IF EXISTS `buildMessageTable`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `buildMessageTable` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `build_id` int(11) NOT NULL,
+  `message_type` varchar(240) DEFAULT NULL,
+  `message_subtype` varchar(240) DEFAULT NULL,
+  `message_value` varchar(480) DEFAULT NULL,
+  `timestamp` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
+  `board` varchar(240) DEFAULT NULL,
+  PRIMARY KEY (`id`),
+  KEY `build_id` (`build_id`),
+  CONSTRAINT `buildMessageTable_ibfk_1` FOREIGN KEY (`build_id`) REFERENCES `buildTable` (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+
 DROP TABLE IF EXISTS `buildStageTable`;
 /*!40101 SET @saved_cs_client     = @@character_set_client */;
 /*!40101 SET character_set_client = utf8 */;
@@ -99,6 +117,7 @@
   `metadata_url` varchar(240) DEFAULT NULL,
   `summary` varchar(1024) DEFAULT NULL,
   `deadline` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00',
+  `important` tinyint(1) DEFAULT NULL,
   PRIMARY KEY (`id`),
   UNIQUE KEY `buildbot_generation` (`buildbot_generation`,`builder_name`,`waterfall`,`build_number`),
   KEY `master_build_id` (`master_build_id`),
@@ -182,7 +201,8 @@
   `final` tinyint NOT NULL,
   `metadata_url` tinyint NOT NULL,
   `summary` tinyint NOT NULL,
-  `deadline` tinyint NOT NULL
+  `deadline` tinyint NOT NULL,
+  `important` tinyint NOT NULL
 ) ENGINE=MyISAM */;
 SET character_set_client = @saved_cs_client;
 
@@ -250,7 +270,8 @@
   `build_final` tinyint NOT NULL,
   `metadata_url` tinyint NOT NULL,
   `summary` tinyint NOT NULL,
-  `deadline` tinyint NOT NULL
+  `deadline` tinyint NOT NULL,
+  `important` tinyint NOT NULL
 ) ENGINE=MyISAM */;
 SET character_set_client = @saved_cs_client;
 
@@ -287,7 +308,8 @@
 /*!50001 SET character_set_results     = utf8 */;
 /*!50001 SET collation_connection      = utf8_general_ci */;
 /*!50001 CREATE ALGORITHM=UNDEFINED */
-/*!50001 VIEW `clActionView` AS select `c`.`id` AS `id`,`c`.`build_id` AS `build_id`,`c`.`change_number` AS `change_number`,`c`.`patch_number` AS `patch_number`,`c`.`change_source` AS `change_source`,`c`.`action` AS `action`,`c`.`reason` AS `reason`,`c`.`timestamp` AS `timestamp`,`b`.`last_updated` AS `last_updated`,`b`.`master_build_id` AS `master_build_id`,`b`.`buildbot_generation` AS `buildbot_generation`,`b`.`builder_name` AS `builder_name`,`b`.`waterfall` AS `waterfall`,`b`.`build_number` AS `build_number`,`b`.`build_config` AS `build_config`,`b`.`bot_hostname` AS `bot_hostname`,`b`.`start_time` AS `start_time`,`b`.`finish_time` AS `finish_time`,`b`.`status` AS `status`,`b`.`build_type` AS `build_type`,`b`.`chrome_version` AS `chrome_version`,`b`.`milestone_version` AS `milestone_version`,`b`.`platform_version` AS `platform_version`,`b`.`full_version` AS `full_version`,`b`.`sdk_version` AS `sdk_version`,`b`.`toolchain_url` AS `toolchain_url`,`b`.`final` AS `final`,`b`.`metadata_url` AS `metadata_url`,`b`.`summary` AS `summary`,`b`.`deadline` AS `deadline` from (`clActionTable` `c` join `buildTable` `b` on((`c`.`build_id` = `b`.`id`))) */;
+/*!50013 DEFINER=`root`@`localhost` SQL SECURITY DEFINER */
+/*!50001 VIEW `clActionView` AS select `c`.`id` AS `id`,`c`.`build_id` AS `build_id`,`c`.`change_number` AS `change_number`,`c`.`patch_number` AS `patch_number`,`c`.`change_source` AS `change_source`,`c`.`action` AS `action`,`c`.`reason` AS `reason`,`c`.`timestamp` AS `timestamp`,`b`.`last_updated` AS `last_updated`,`b`.`master_build_id` AS `master_build_id`,`b`.`buildbot_generation` AS `buildbot_generation`,`b`.`builder_name` AS `builder_name`,`b`.`waterfall` AS `waterfall`,`b`.`build_number` AS `build_number`,`b`.`build_config` AS `build_config`,`b`.`bot_hostname` AS `bot_hostname`,`b`.`start_time` AS `start_time`,`b`.`finish_time` AS `finish_time`,`b`.`status` AS `status`,`b`.`build_type` AS `build_type`,`b`.`chrome_version` AS `chrome_version`,`b`.`milestone_version` AS `milestone_version`,`b`.`platform_version` AS `platform_version`,`b`.`full_version` AS `full_version`,`b`.`sdk_version` AS `sdk_version`,`b`.`toolchain_url` AS `toolchain_url`,`b`.`final` AS `final`,`b`.`metadata_url` AS `metadata_url`,`b`.`summary` AS `summary`,`b`.`deadline` AS `deadline`,`b`.`important` AS `important` from (`clActionTable` `c` join `buildTable` `b` on((`c`.`build_id` = `b`.`id`))) */;
 /*!50001 SET character_set_client      = @saved_cs_client */;
 /*!50001 SET character_set_results     = @saved_cs_results */;
 /*!50001 SET collation_connection      = @saved_col_connection */;
@@ -302,7 +324,8 @@
 /*!50001 SET character_set_results     = utf8 */;
 /*!50001 SET collation_connection      = utf8_general_ci */;
 /*!50001 CREATE ALGORITHM=UNDEFINED */
-/*!50001 VIEW `failureView` AS select `f`.`id` AS `id`,`f`.`build_stage_id` AS `build_stage_id`,`f`.`outer_failure_id` AS `outer_failure_id`,`f`.`exception_type` AS `exception_type`,`f`.`exception_message` AS `exception_message`,`f`.`exception_category` AS `exception_category`,`f`.`extra_info` AS `extra_info`,`f`.`timestamp` AS `timestamp`,`bs`.`name` AS `stage_name`,`bs`.`board` AS `board`,`bs`.`status` AS `stage_status`,`bs`.`last_updated` AS `stage_last_updated`,`bs`.`start_time` AS `stage_start_time`,`bs`.`finish_time` AS `stage_finish_time`,`bs`.`final` AS `stage_final`,`b`.`id` AS `build_id`,`b`.`last_updated` AS `build_last_updated`,`b`.`master_build_id` AS `master_build_id`,`b`.`buildbot_generation` AS `buildbot_generation`,`b`.`builder_name` AS `builder_name`,`b`.`waterfall` AS `waterfall`,`b`.`build_number` AS `build_number`,`b`.`build_config` AS `build_config`,`b`.`bot_hostname` AS `bot_hostname`,`b`.`start_time` AS `build_start_time`,`b`.`finish_time` AS `build_finish_time`,`b`.`status` AS `build_status`,`b`.`build_type` AS `build_type`,`b`.`chrome_version` AS `chrome_version`,`b`.`milestone_version` AS `milestone_version`,`b`.`platform_version` AS `platform_version`,`b`.`full_version` AS `full_version`,`b`.`sdk_version` AS `sdk_version`,`b`.`toolchain_url` AS `toolchain_url`,`b`.`final` AS `build_final`,`b`.`metadata_url` AS `metadata_url`,`b`.`summary` AS `summary`,`b`.`deadline` AS `deadline` from ((`failureTable` `f` join `buildStageTable` `bs` on((`f`.`build_stage_id` = `bs`.`id`))) join `buildTable` `b` on((`bs`.`build_id` = `b`.`id`))) */;
+/*!50013 DEFINER=`root`@`localhost` SQL SECURITY DEFINER */
+/*!50001 VIEW `failureView` AS select `f`.`id` AS `id`,`f`.`build_stage_id` AS `build_stage_id`,`f`.`outer_failure_id` AS `outer_failure_id`,`f`.`exception_type` AS `exception_type`,`f`.`exception_message` AS `exception_message`,`f`.`exception_category` AS `exception_category`,`f`.`extra_info` AS `extra_info`,`f`.`timestamp` AS `timestamp`,`bs`.`name` AS `stage_name`,`bs`.`board` AS `board`,`bs`.`status` AS `stage_status`,`bs`.`last_updated` AS `stage_last_updated`,`bs`.`start_time` AS `stage_start_time`,`bs`.`finish_time` AS `stage_finish_time`,`bs`.`final` AS `stage_final`,`b`.`id` AS `build_id`,`b`.`last_updated` AS `build_last_updated`,`b`.`master_build_id` AS `master_build_id`,`b`.`buildbot_generation` AS `buildbot_generation`,`b`.`builder_name` AS `builder_name`,`b`.`waterfall` AS `waterfall`,`b`.`build_number` AS `build_number`,`b`.`build_config` AS `build_config`,`b`.`bot_hostname` AS `bot_hostname`,`b`.`start_time` AS `build_start_time`,`b`.`finish_time` AS `build_finish_time`,`b`.`status` AS `build_status`,`b`.`build_type` AS `build_type`,`b`.`chrome_version` AS `chrome_version`,`b`.`milestone_version` AS `milestone_version`,`b`.`platform_version` AS `platform_version`,`b`.`full_version` AS `full_version`,`b`.`sdk_version` AS `sdk_version`,`b`.`toolchain_url` AS `toolchain_url`,`b`.`final` AS `build_final`,`b`.`metadata_url` AS `metadata_url`,`b`.`summary` AS `summary`,`b`.`deadline` AS `deadline`,`b`.`important` AS `important` from ((`failureTable` `f` join `buildStageTable` `bs` on((`f`.`build_stage_id` = `bs`.`id`))) join `buildTable` `b` on((`bs`.`build_id` = `b`.`id`))) */;
 /*!50001 SET character_set_client      = @saved_cs_client */;
 /*!50001 SET character_set_results     = @saved_cs_results */;
 /*!50001 SET collation_connection      = @saved_col_connection */;
diff --git a/cidb/schema.dump.readme b/cidb/schema.dump.readme
index d8e2aef..d650300 100644
--- a/cidb/schema.dump.readme
+++ b/cidb/schema.dump.readme
@@ -1,15 +1,15 @@
 These instructions assume you have a full chromiumos checkout at
 ~/chromiumos/
 
-To generate a schema dump, run
-  $ lib/cidb_integration_test.py CIDBMigrationsTest.testMigrations --no-wipe
-to bring the test database instance to the latest schema (as reflected in your
-source tree).
+To generate a schema dump, run this inside the chroot:
+  $ lib/cidb_integration_test CIDBMigrationsTest.testMigrations --no-wipe
+
 Then, follow the instructions in the developer.readme to re-launch the mysqld
-daemon from the temporary directory and dump the schema using:
+daemon from the temporary directory and dump the schema using (again, inside the
+chroot):
 
 $ mysqldump -u root -S ${tmpdir}/mysqld_dir/mysqld.socket --no-data \
     --single-transaction cidb | grep -v '^--' \
-    > ~/chromiumos/chromite/cidb/schema.dump
+    > ~/trunk/chromite/cidb/schema.dump
 
 Remember to cleanup the temporary directory when you're done.
diff --git a/cli/command.py b/cli/command.py
index 360a6ec..7f4e4b0 100644
--- a/cli/command.py
+++ b/cli/command.py
@@ -19,13 +19,10 @@
 import os
 
 from chromite.cbuildbot import constants
-from chromite.lib import brick_lib
 from chromite.lib import commandline
 from chromite.lib import cros_build_lib
 from chromite.lib import cros_import
 from chromite.lib import cros_logging as logging
-from chromite.lib import osutils
-from chromite.lib import workspace_lib
 
 
 # Paths for finding and importing subcommand modules.
@@ -36,26 +33,6 @@
 _commands = dict()
 
 
-def SetupFileLogger(filename='brillo.log', log_level=logging.DEBUG):
-  """Store log messages to a file.
-
-  In case of an error, this file can be made visible to the user.
-  """
-  workspace_path = workspace_lib.WorkspacePath()
-  if workspace_path is None:
-    return
-  path = os.path.join(workspace_path, workspace_lib.WORKSPACE_LOGS_DIR,
-                      filename)
-  osutils.Touch(path, makedirs=True)
-  logger = logging.getLogger()
-  fh = logging.FileHandler(path, mode='w')
-  fh.setLevel(log_level)
-  fh.setFormatter(
-      logging.Formatter(fmt=constants.LOGGER_FMT,
-                        datefmt=constants.LOGGER_DATE_FMT))
-  logger.addHandler(fh)
-
-
 def UseProgressBar():
   """Determine whether the progress bar is to be used or not.
 
@@ -158,8 +135,6 @@
 
   def __init__(self, options):
     self.options = options
-    brick = brick_lib.FindBrickInPath()
-    self.curr_brick_locator = brick.brick_locator if brick else None
 
   @classmethod
   def AddParser(cls, parser):
diff --git a/cli/command_unittest.py b/cli/command_unittest.py
index 33c0725..8d9dea3 100644
--- a/cli/command_unittest.py
+++ b/cli/command_unittest.py
@@ -15,10 +15,8 @@
 from chromite.lib import commandline
 from chromite.lib import cros_build_lib_unittest
 from chromite.lib import cros_import
-from chromite.lib import cros_logging as logging
 from chromite.lib import cros_test_lib
 from chromite.lib import partial_mock
-from chromite.lib import workspace_lib
 
 
 # pylint:disable=protected-access
@@ -132,48 +130,3 @@
     # Pick some commands that are likely to not go away.
     self.assertIn('chrome-sdk', cros_commands)
     self.assertIn('flash', cros_commands)
-
-
-class FileLoggerSetupTest(cros_test_lib.WorkspaceTestCase):
-  """Test that logging to file works correctly."""
-
-  def setUp(self):
-    self.CreateWorkspace()
-
-  def testSetupFileLoggerFilename(self):
-    """Test that the filename and path are correct."""
-    patch_handler = self.PatchObject(logging, 'FileHandler',
-                                     return_value=logging.StreamHandler())
-    command.SetupFileLogger(filename='foo.log')
-
-    # Test that the filename is correct.
-    patch_handler.assert_called_with(
-        os.path.join(self.workspace_path, workspace_lib.WORKSPACE_LOGS_DIR,
-                     'foo.log'), mode='w')
-
-  def testSetupFileLoggerNoFilename(self):
-    """Test that the filename and path are correct with no arguments."""
-    patch_handler = self.PatchObject(logging, 'FileHandler',
-                                     return_value=logging.StreamHandler())
-    command.SetupFileLogger()
-
-    # Test that the filename is correct.
-    patch_handler.assert_called_with(
-        os.path.join(self.workspace_path, workspace_lib.WORKSPACE_LOGS_DIR,
-                     'brillo.log'), mode='w')
-
-  def testSetupFileLoggerLogLevels(self):
-    """Test that the logger operates at the right level."""
-    command.SetupFileLogger('foo.log', log_level=logging.INFO)
-    logging.getLogger().setLevel(logging.DEBUG)
-    logging.debug('debug')
-    logging.info('info')
-    logging.notice('notice')
-
-    # Test that the logs are correct.
-    logs = open(
-        os.path.join(self.workspace_path, workspace_lib.WORKSPACE_LOGS_DIR,
-                     'foo.log'), 'r').read()
-    self.assertNotIn('debug', logs)
-    self.assertIn('info', logs)
-    self.assertIn('notice', logs)
diff --git a/cli/cros/cros_build.py b/cli/cros/cros_build.py
index b868c8f..0709288 100644
--- a/cli/cros/cros_build.py
+++ b/cli/cros/cros_build.py
@@ -7,15 +7,12 @@
 from __future__ import print_function
 
 from chromite.cli import command
-from chromite.lib import blueprint_lib
-from chromite.lib import brick_lib
 from chromite.lib import chroot_util
 from chromite.lib import commandline
 from chromite.lib import cros_build_lib
 from chromite.lib import cros_logging as logging
 from chromite.lib import operation
 from chromite.lib import parallel
-from chromite.lib import toolchain
 from chromite.lib import workon_helper
 
 
@@ -51,40 +48,23 @@
     self.host = False
     self.board = None
     self.brick = None
-    self.blueprint = None
 
     if self.options.host:
       self.host = True
     elif self.options.board:
       self.board = self.options.board
-    elif self.options.blueprint:
-      self.blueprint = blueprint_lib.Blueprint(self.options.blueprint)
-
-      if not self.build_pkgs:
-        self.build_pkgs = self.blueprint.GetPackages()
-    elif self.options.brick or self.curr_brick_locator:
-      self.brick = brick_lib.Brick(self.options.brick
-                                   or self.curr_brick_locator)
-      self.board = self.brick.FriendlyName()
-      if not self.build_pkgs:
-        self.build_pkgs = self.brick.MainPackages()
     else:
       # If nothing is explicitly set, use the default board.
       self.board = cros_build_lib.GetDefaultBoard()
 
     # Set sysroot and friendly name. The latter is None if building for host.
-    self.sysroot = cros_build_lib.GetSysroot(self.blueprint.FriendlyName()
-                                             if self.blueprint else self.board)
+    self.sysroot = cros_build_lib.GetSysroot(self.board)
 
   @classmethod
   def AddParser(cls, parser):
     super(cls, BuildCommand).AddParser(parser)
     target = parser.add_mutually_exclusive_group()
     target.add_argument('--board', help='The board to build packages for.')
-    target.add_argument('--brick', type='brick_path',
-                        help='The brick to build packages for.')
-    target.add_argument('--blueprint', type='blueprint_path',
-                        help='The blueprint to build packages for.')
     target.add_argument('--host', help='Build packages for the chroot itself.',
                         default=False, action='store_true')
     parser.add_argument('--no-binary', help="Don't use binary packages.",
@@ -134,7 +114,7 @@
 
     Only print the output if this step fails or if we're in debug mode.
     """
-    if self.options.deps and not self.host and not self.blueprint:
+    if self.options.deps and not self.host:
       cmd = chroot_util.GetEmergeCommand(sysroot=self.sysroot)
       cmd += ['-pe', '--backtrack=0'] + self.build_pkgs
       try:
@@ -160,7 +140,7 @@
     self.options.Freeze()
 
     if not self.host:
-      if not (self.board or self.brick or self.blueprint):
+      if not (self.board or self.brick):
         cros_build_lib.Die('You did not specify a board/brick to build for. '
                            'You need to be in a brick directory or set '
                            '--board/--brick/--host')
@@ -169,11 +149,7 @@
         cros_build_lib.Die('--brick should not be used with board names. Use '
                            '--board=%s instead.' % self.brick.config['name'])
 
-    if self.blueprint:
-      chroot_args = ['--toolchains',
-                     ','.join(toolchain.GetToolchainsForBrick(
-                         self.blueprint.GetBSP()).iterkeys())]
-    elif self.board:
+    if self.board:
       chroot_args = ['--board', self.board]
     else:
       chroot_args = None
@@ -184,13 +160,7 @@
       cros_build_lib.Die('No packages found, nothing to build.')
 
     # Set up the sysroots if not building for host.
-    if self.blueprint:
-      if self.chroot_update:
-        chroot_util.UpdateChroot(
-            update_host_packages=self.options.host_packages_update,
-            brick=brick_lib.Brick(self.blueprint.GetBSP()))
-      chroot_util.InitializeSysroots(self.blueprint)
-    elif self.brick or self.board:
+    if self.brick or self.board:
       chroot_util.SetupBoard(
           brick=self.brick, board=self.board,
           update_chroot=self.chroot_update,
diff --git a/cli/cros/cros_chrome_sdk.py b/cli/cros/cros_chrome_sdk.py
index 73ac533..116a941 100644
--- a/cli/cros/cros_chrome_sdk.py
+++ b/cli/cros/cros_chrome_sdk.py
@@ -23,6 +23,7 @@
 from chromite.lib import osutils
 from chromite.lib import path_util
 from chromite.lib import stats
+from chromite.cbuildbot import archive_lib
 from chromite.cbuildbot import config_lib
 from chromite.cbuildbot import constants
 
@@ -91,7 +92,7 @@
         force usage of the external configuration if both external and internal
         are available.
     """
-    site_config = config_lib.LoadConfigFromFile()
+    site_config = config_lib.GetConfig()
 
     self.cache_base = os.path.join(cache_dir, COMMAND_NAME)
     if clear_cache:
@@ -104,8 +105,7 @@
     self.board = board
     self.config = site_config.FindCanonicalConfigForBoard(
         board, allow_internal=not use_external_config)
-    self.gs_base = '%s/%s' % (constants.DEFAULT_ARCHIVE_BUCKET,
-                              self.config['name'])
+    self.gs_base = archive_lib.GetBaseUploadURI(self.config)
     self.clear_cache = clear_cache
     self.chrome_src = chrome_src
     self.sdk_path = sdk_path
diff --git a/cli/cros/cros_payload.py b/cli/cros/cros_payload.py
index ebac811..9581b59 100644
--- a/cli/cros/cros_payload.py
+++ b/cli/cros/cros_payload.py
@@ -8,7 +8,6 @@
 
 import itertools
 import os
-import string
 import sys
 import textwrap
 
@@ -18,6 +17,8 @@
 # Needed for the dev.host.lib import below.
 sys.path.insert(0, os.path.join(constants.SOURCE_ROOT, 'src', 'platform'))
 
+MAJOR_PAYLOAD_VERSION_CHROMEOS = 1
+MAJOR_PAYLOAD_VERSION_BRILLO = 2
 
 def DisplayValue(key, value):
   """Print out a key, value pair with values left-aligned."""
@@ -35,7 +36,7 @@
           ' '.join('%.2x' % ord(c) for c in chunk) +
           '   ' * (16 - len(chunk)) +
           ' | ' +
-          ''.join(c if c in string.printable else '.' for c in chunk))
+          ''.join(c if 32 <= ord(c) and ord(c) < 127 else '.' for c in chunk))
 
 
 @command.CommandDecorator('payload')
@@ -85,30 +86,52 @@
   def _DisplayManifest(self):
     """Show information from the payload manifest."""
     manifest = self.payload.manifest
-    DisplayValue('Number of operations', len(manifest.install_operations))
-    DisplayValue('Number of kernel ops',
-                 len(manifest.kernel_install_operations))
+    if self.payload.header.version == MAJOR_PAYLOAD_VERSION_BRILLO:
+      DisplayValue('Number of partitions', len(manifest.partitions))
+      for partition in manifest.partitions:
+        DisplayValue('  Number of "%s" ops' % partition.partition_name,
+                     len(partition.operations))
+    else:
+      DisplayValue('Number of operations', len(manifest.install_operations))
+      DisplayValue('Number of kernel ops',
+                   len(manifest.kernel_install_operations))
     DisplayValue('Block size', manifest.block_size)
     DisplayValue('Minor version', manifest.minor_version)
 
   def _DisplaySignatures(self):
     """Show information about the signatures from the manifest."""
+    header = self.payload.header
+    if header.metadata_signature_len:
+      offset = header.size + header.manifest_len
+      DisplayValue('Metadata signatures blob',
+                   'file_offset=%d (%d bytes)' %
+                   (offset, header.metadata_signature_len))
+      signatures_blob = self.payload.ReadDataBlob(
+          -header.metadata_signature_len,
+          header.metadata_signature_len)
+      self._DisplaySignaturesBlob('Metadata', signatures_blob)
+    else:
+      print('No metadata signatures stored in the payload')
+
     manifest = self.payload.manifest
-    if not manifest.HasField('signatures_offset'):
-      print('No signatures stored in the payload')
-      return
+    if manifest.HasField('signatures_offset'):
+      signature_msg = 'blob_offset=%d' % manifest.signatures_offset
+      if manifest.signatures_size:
+        signature_msg += ' (%d bytes)' % manifest.signatures_size
+      DisplayValue('Payload signatures blob', signature_msg)
+      signatures_blob = self.payload.ReadDataBlob(manifest.signatures_offset,
+                                                  manifest.signatures_size)
+      self._DisplaySignaturesBlob('Payload', signatures_blob)
+    else:
+      print('No payload signatures stored in the payload')
 
-    signature_msg = 'offset=%d' % manifest.signatures_offset
-    if manifest.signatures_size:
-      signature_msg += ' (%d bytes)' % manifest.signatures_size
-    DisplayValue('Signature blob', signature_msg)
-    signatures_blob = self.payload.ReadDataBlob(manifest.signatures_offset,
-                                                manifest.signatures_size)
-
+  @staticmethod
+  def _DisplaySignaturesBlob(signature_name, signatures_blob):
     from dev.host.lib.update_payload import update_metadata_pb2
     signatures = update_metadata_pb2.Signatures()
     signatures.ParseFromString(signatures_blob)
-    print('Payload signatures: (%d entries)' % len(signatures.signatures))
+    print('%s signatures: (%d entries)' %
+          (signature_name, len(signatures.signatures)))
     for signature in signatures.signatures:
       print('  version=%s, hex_data: (%d bytes)' %
             (signature.version if signature.HasField('version') else None,
@@ -162,8 +185,12 @@
     read_blocks = 0
     written_blocks = 0
     num_write_seeks = 0
-    for operations in (manifest.install_operations,
-                       manifest.kernel_install_operations):
+    if self.payload.header.version == MAJOR_PAYLOAD_VERSION_BRILLO:
+      partitions_operations = [part.operations for part in manifest.partitions]
+    else:
+      partitions_operations = [manifest.install_operations,
+                               manifest.kernel_install_operations]
+    for operations in partitions_operations:
       last_ext = None
       for curr_op in operations:
         read_blocks += sum([ext.num_blocks for ext in curr_op.src_extents])
@@ -207,7 +234,12 @@
       self._DisplayStats(self.payload.manifest)
     if self.options.list_ops:
       print()
-      self._DisplayOps('Install operations',
-                       self.payload.manifest.install_operations)
-      self._DisplayOps('Kernel install operations',
-                       self.payload.manifest.kernel_install_operations)
+      if self.payload.header.version == MAJOR_PAYLOAD_VERSION_BRILLO:
+        for partition in self.payload.manifest.partitions:
+          self._DisplayOps('%s install operations' % partition.partition_name,
+                           partition.operations)
+      else:
+        self._DisplayOps('Install operations',
+                         self.payload.manifest.install_operations)
+        self._DisplayOps('Kernel install operations',
+                         self.payload.manifest.kernel_install_operations)
diff --git a/cli/cros/cros_payload_unittest.py b/cli/cros/cros_payload_unittest.py
index d99f0e9..36f4749 100644
--- a/cli/cros/cros_payload_unittest.py
+++ b/cli/cros/cros_payload_unittest.py
@@ -50,10 +50,17 @@
   def HasField(self, field):
     return hasattr(self, field)
 
+class FakePartition(object):
+  """Fake PartitionUpdate field for testing."""
+
+  def __init__(self, partition_name, operations):
+    self.partition_name = partition_name
+    self.operations = operations
+
 class FakeManifest(object):
   """Fake manifest for testing."""
 
-  def __init__(self):
+  def __init__(self, major_version):
     FakeExtent = collections.namedtuple('FakeExtent',
                                         ['start_block', 'num_blocks'])
     self.install_operations = [FakeOp([],
@@ -67,6 +74,11 @@
         [FakeExtent(x, x) for x in xrange(20)],
         update_payload.common.OpType.SOURCE_COPY,
         src_length=4096)]
+    if major_version == cros_payload.MAJOR_PAYLOAD_VERSION_BRILLO:
+      self.partitions = [FakePartition('rootfs', self.install_operations),
+                         FakePartition('kernel',
+                                       self.kernel_install_operations)]
+      self.install_operations = self.kernel_install_operations = []
     self.block_size = 4096
     self.minor_version = 4
     FakePartInfo = collections.namedtuple('FakePartInfo', ['size'])
@@ -81,19 +93,32 @@
     """Fake HasField method based on the python members."""
     return hasattr(self, field_name) and getattr(self, field_name) is not None
 
+class FakeHeader(object):
+  """Fake payload header for testing."""
+
+  def __init__(self, version, manifest_len, metadata_signature_len):
+    self.version = version
+    self.manifest_len = manifest_len
+    self.metadata_signature_len = metadata_signature_len
+
+  @property
+  def size(self):
+    return (20 if self.version == cros_payload.MAJOR_PAYLOAD_VERSION_CHROMEOS
+            else 24)
+
+
 class FakePayload(object):
   """Fake payload for testing."""
 
-  def __init__(self):
-    FakeHeader = collections.namedtuple('FakeHeader',
-                                        ['version', 'manifest_len'])
-    self._header = FakeHeader('111', 222)
+  def __init__(self, major_version):
+    self._header = FakeHeader(major_version, 222, 0)
     self.header = None
-    self._manifest = FakeManifest()
+    self._manifest = FakeManifest(major_version)
     self.manifest = None
 
     self._blobs = {}
-    self._signatures = update_metadata_pb2.Signatures()
+    self._payload_signatures = update_metadata_pb2.Signatures()
+    self._metadata_signatures = update_metadata_pb2.Signatures()
 
   def Init(self):
     """Fake Init that sets header and manifest.
@@ -114,15 +139,28 @@
                              'actual: %d)' % (len(blob), length))
     return blob
 
-  def AddSignature(self, **kwargs):
-    new_signature = self._signatures.signatures.add()
+  @staticmethod
+  def _AddSignatureToProto(proto, **kwargs):
+    """Add a new Signature element to the passed proto."""
+    new_signature = proto.signatures.add()
     for key, val in kwargs.iteritems():
       setattr(new_signature, key, val)
-    blob = self._signatures.SerializeToString()
+
+  def AddPayloadSignature(self, **kwargs):
+    self._AddSignatureToProto(self._payload_signatures, **kwargs)
+    blob = self._payload_signatures.SerializeToString()
     self._manifest.signatures_offset = 1234
     self._manifest.signatures_size = len(blob)
     self._blobs[self._manifest.signatures_offset] = blob
 
+  def AddMetadataSignature(self, **kwargs):
+    self._AddSignatureToProto(self._metadata_signatures, **kwargs)
+    if self._header.metadata_signature_len:
+      del self._blobs[-self._header.metadata_signature_len]
+    blob = self._metadata_signatures.SerializeToString()
+    self._header.metadata_signature_len = len(blob)
+    self._blobs[-len(blob)] = blob
+
 
 class PayloadCommandTest(cros_test_lib.MockOutputTestCase):
   """Test class for our PayloadCommand class."""
@@ -137,13 +175,14 @@
   def testRun(self):
     """Verify that Run parses and displays the payload like we expect."""
     payload_cmd = cros_payload.PayloadCommand(FakeOption(action='show'))
-    self.PatchObject(update_payload, 'Payload', return_value=FakePayload())
+    self.PatchObject(update_payload, 'Payload', return_value=FakePayload(
+        cros_payload.MAJOR_PAYLOAD_VERSION_CHROMEOS))
 
     with self.OutputCapturer() as output:
       payload_cmd.Run()
 
     stdout = output.GetStdout()
-    expected_out = """Payload version:         111
+    expected_out = """Payload version:         1
 Manifest length:         222
 Number of operations:    1
 Number of kernel ops:    1
@@ -152,17 +191,18 @@
 """
     self.assertEquals(stdout, expected_out)
 
-  def testListOps(self):
+  def testListOpsOnVersion1(self):
     """Verify that the --list_ops option gives the correct output."""
     payload_cmd = cros_payload.PayloadCommand(FakeOption(list_ops=True,
                                                          action='show'))
-    self.PatchObject(update_payload, 'Payload', return_value=FakePayload())
+    self.PatchObject(update_payload, 'Payload', return_value=FakePayload(
+        cros_payload.MAJOR_PAYLOAD_VERSION_CHROMEOS))
 
     with self.OutputCapturer() as output:
       payload_cmd.Run()
 
     stdout = output.GetStdout()
-    expected_out = """Payload version:         111
+    expected_out = """Payload version:         1
 Manifest length:         222
 Number of operations:    1
 Number of kernel ops:    1
@@ -185,17 +225,53 @@
 """
     self.assertEquals(stdout, expected_out)
 
-  def testStats(self):
-    """Verify that the --stats option works correctly."""
-    payload_cmd = cros_payload.PayloadCommand(FakeOption(stats=True,
+  def testListOpsOnVersion2(self):
+    """Verify that the --list_ops option gives the correct output."""
+    payload_cmd = cros_payload.PayloadCommand(FakeOption(list_ops=True,
                                                          action='show'))
-    self.PatchObject(update_payload, 'Payload', return_value=FakePayload())
+    self.PatchObject(update_payload, 'Payload', return_value=FakePayload(
+        cros_payload.MAJOR_PAYLOAD_VERSION_BRILLO))
 
     with self.OutputCapturer() as output:
       payload_cmd.Run()
 
     stdout = output.GetStdout()
-    expected_out = """Payload version:         111
+    expected_out = """Payload version:         2
+Manifest length:         222
+Number of partitions:    2
+  Number of "rootfs" ops: 1
+  Number of "kernel" ops: 1
+Block size:              4096
+Minor version:           4
+
+rootfs install operations:
+  0: REPLACE_BZ
+    Data offset: 1
+    Data length: 1
+    Destination: 2 extents (3 blocks)
+      (1,1) (2,2)
+kernel install operations:
+  0: SOURCE_COPY
+    Source: 1 extent (1 block)
+      (1,1)
+    Destination: 20 extents (190 blocks)
+      (0,0) (1,1) (2,2) (3,3) (4,4) (5,5) (6,6) (7,7) (8,8) (9,9) (10,10)
+      (11,11) (12,12) (13,13) (14,14) (15,15) (16,16) (17,17) (18,18) (19,19)
+"""
+    self.assertEquals(stdout, expected_out)
+
+  def testStatsOnVersion1(self):
+    """Verify that the --stats option works correctly."""
+    payload_cmd = cros_payload.PayloadCommand(FakeOption(stats=True,
+                                                         action='show'))
+    self.PatchObject(update_payload, 'Payload', return_value=FakePayload(
+        cros_payload.MAJOR_PAYLOAD_VERSION_CHROMEOS))
+
+    with self.OutputCapturer() as output:
+      payload_cmd.Run()
+
+    stdout = output.GetStdout()
+    expected_out = """Payload version:         1
 Manifest length:         222
 Number of operations:    1
 Number of kernel ops:    1
@@ -207,23 +283,49 @@
 """
     self.assertEquals(stdout, expected_out)
 
+  def testStatsOnVersion2(self):
+    """Verify that the --stats option works correctly on version 2."""
+    payload_cmd = cros_payload.PayloadCommand(FakeOption(stats=True,
+                                                         action='show'))
+    self.PatchObject(update_payload, 'Payload', return_value=FakePayload(
+        cros_payload.MAJOR_PAYLOAD_VERSION_BRILLO))
+
+    with self.OutputCapturer() as output:
+      payload_cmd.Run()
+
+    stdout = output.GetStdout()
+    expected_out = """Payload version:         2
+Manifest length:         222
+Number of partitions:    2
+  Number of "rootfs" ops: 1
+  Number of "kernel" ops: 1
+Block size:              4096
+Minor version:           4
+Blocks read:             11
+Blocks written:          193
+Seeks when writing:      18
+"""
+    self.assertEquals(stdout, expected_out)
+
   def testEmptySignatures(self):
     """Verify that the --signatures option works with unsigned payloads."""
     payload_cmd = cros_payload.PayloadCommand(
         FakeOption(action='show', signatures=True))
-    self.PatchObject(update_payload, 'Payload', return_value=FakePayload())
+    self.PatchObject(update_payload, 'Payload', return_value=FakePayload(
+        cros_payload.MAJOR_PAYLOAD_VERSION_CHROMEOS))
 
     with self.OutputCapturer() as output:
       payload_cmd.Run()
 
     stdout = output.GetStdout()
-    expected_out = """Payload version:         111
+    expected_out = """Payload version:         1
 Manifest length:         222
 Number of operations:    1
 Number of kernel ops:    1
 Block size:              4096
 Minor version:           4
-No signatures stored in the payload
+No metadata signatures stored in the payload
+No payload signatures stored in the payload
 """
     self.assertEquals(stdout, expected_out)
 
@@ -232,22 +334,29 @@
     """Verify that the --signatures option shows the present signatures."""
     payload_cmd = cros_payload.PayloadCommand(
         FakeOption(action='show', signatures=True))
-    payload = FakePayload()
-    payload.AddSignature(version=1, data='12345678abcdefgh\x00\x01\x02\x03')
-    payload.AddSignature(data='I am a signature so access is yes.')
+    payload = FakePayload(cros_payload.MAJOR_PAYLOAD_VERSION_BRILLO)
+    payload.AddPayloadSignature(version=1,
+                                data='12345678abcdefgh\x00\x01\x02\x03')
+    payload.AddPayloadSignature(data='I am a signature so access is yes.')
+    payload.AddMetadataSignature(data='\x00\x0a\x0c')
     self.PatchObject(update_payload, 'Payload', return_value=payload)
 
     with self.OutputCapturer() as output:
       payload_cmd.Run()
 
     stdout = output.GetStdout()
-    expected_out = """Payload version:         111
+    expected_out = """Payload version:         2
 Manifest length:         222
-Number of operations:    1
-Number of kernel ops:    1
+Number of partitions:    2
+  Number of "rootfs" ops: 1
+  Number of "kernel" ops: 1
 Block size:              4096
 Minor version:           4
-Signature blob:          offset=1234 (64 bytes)
+Metadata signatures blob: file_offset=246 (7 bytes)
+Metadata signatures: (1 entries)
+  version=None, hex_data: (3 bytes)
+    00 0a 0c                                        | ...
+Payload signatures blob: blob_offset=1234 (64 bytes)
 Payload signatures: (2 entries)
   version=1, hex_data: (20 bytes)
     31 32 33 34 35 36 37 38 61 62 63 64 65 66 67 68 | 12345678abcdefgh
diff --git a/cli/cros/lint.py b/cli/cros/lint.py
index f54c8a6..36d07e8 100644
--- a/cli/cros/lint.py
+++ b/cli/cros/lint.py
@@ -385,6 +385,8 @@
   class _MessageR9200(object): pass
   class _MessageR9201(object): pass
   class _MessageR9202(object): pass
+  class _MessageR9203(object): pass
+  class _MessageR9210(object): pass
   # pylint: enable=class-missing-docstring,multiple-statements
 
   name = 'source_checker'
@@ -397,6 +399,10 @@
                 ('missing-shebang'), _MessageR9201),
       'R9202': ('Shebang is set, but file is not executable',
                 ('spurious-shebang'), _MessageR9202),
+      'R9203': ('Unittest not named xxx_unittest.py',
+                ('unittest-misnamed'), _MessageR9203),
+      'R9210': ('Trailing new lines found at end of file',
+                ('excess-trailing-newlines'), _MessageR9210),
   }
   options = ()
 
@@ -405,6 +411,8 @@
     stream = node.file_stream
     stream.seek(0)
     self._check_shebang(node, stream)
+    self._check_module_name(node)
+    self._check_trailing_lines(node, stream)
 
   def _check_shebang(self, _node, stream):
     """Verify the shebang is version specific"""
@@ -424,6 +432,21 @@
     if parts[0] not in ('#!/usr/bin/python2', '#!/usr/bin/python3'):
       self.add_message('R9200')
 
+  def _check_module_name(self, node):
+    """Make sure the module name is sane"""
+    # Catch various typos.
+    name = node.name.rsplit('.', 2)[-1]
+    if name.rsplit('_', 2)[-1] in ('unittests',):
+      self.add_message('R9203')
+
+  def _check_trailing_lines(self, _node, stream):
+    """Reject trailing lines"""
+    st = os.fstat(stream.fileno())
+    if st.st_size > 1:
+      stream.seek(st.st_size - 2)
+      if not stream.read().strip('\n'):
+        self.add_message('R9210')
+
 
 class ChromiteLoggingChecker(BaseChecker):
   """Make sure we enforce rules on importing logging."""
diff --git a/cli/cros/lint_unittest.py b/cli/cros/lint_unittest.py
index b78c655..20b86c7 100644
--- a/cli/cros/lint_unittest.py
+++ b/cli/cros/lint_unittest.py
@@ -23,7 +23,7 @@
   Arg = collections.namedtuple('Arg', ('name',))
 
   def __init__(self, doc='', fromlineno=0, path='foo.py', args=(), vararg='',
-               kwarg='', names=None, lineno=0):
+               kwarg='', names=None, lineno=0, name='module'):
     if names is None:
       names = [('name', None)]
     self.doc = doc
@@ -34,6 +34,7 @@
     self.args = self.Args(args=[self.Arg(name=x) for x in args],
                           vararg=vararg, kwarg=kwarg)
     self.names = names
+    self.name = name
 
   def argnames(self):
     return self.args
@@ -396,3 +397,25 @@
     )
     with open('/bin/sh') as f:
       self._testShebang(shebangs, 0, f.fileno())
+
+  def testGoodUnittestName(self):
+    """Verify _check_module_name accepts good unittest names"""
+    module_names = (
+        'lint_unittest',
+    )
+    for name in module_names:
+      node = TestNode(name=name)
+      self.results = []
+      self.checker._check_module_name(node)
+      self.assertEqual(len(self.results), 0)
+
+  def testBadUnittestName(self):
+    """Verify _check_module_name accepts good unittest names"""
+    module_names = (
+        'lint_unittests',
+    )
+    for name in module_names:
+      node = TestNode(name=name)
+      self.results = []
+      self.checker._check_module_name(node)
+      self.assertEqual(len(self.results), 1)
diff --git a/cli/flash_unittest.py b/cli/flash_unittest.py
index 2a7a868..7a18d4f 100644
--- a/cli/flash_unittest.py
+++ b/cli/flash_unittest.py
@@ -19,7 +19,6 @@
 from chromite.lib import osutils
 from chromite.lib import partial_mock
 from chromite.lib import remote_access
-from chromite.lib import workspace_lib
 
 
 class RemoteDeviceUpdaterMock(partial_mock.PartialCmdMock):
@@ -62,7 +61,6 @@
     self.PatchObject(dev_server_wrapper, 'GetUpdatePayloads')
     self.PatchObject(remote_access, 'CHECK_INTERVAL', new=0)
     self.PatchObject(remote_access, 'ChromiumOSDevice')
-    self.PatchObject(workspace_lib, 'WorkspacePath', return_value=None)
 
   def testUpdateAll(self):
     """Tests that update methods are called correctly."""
@@ -141,7 +139,6 @@
     self.PatchObject(os.path, 'exists', return_value=True)
     self.isgpt_mock = self.PatchObject(flash, '_IsFilePathGPTDiskImage',
                                        return_value=True)
-    self.PatchObject(workspace_lib, 'WorkspacePath', return_value=None)
 
   def testLocalImagePathCopy(self):
     """Tests that imaging methods are called correctly."""
diff --git a/compute/bot_constants.py b/compute/bot_constants.py
index aa8e1ef..286bef5 100644
--- a/compute/bot_constants.py
+++ b/compute/bot_constants.py
@@ -15,8 +15,10 @@
 CHROMITE_URL = 'https://chromium.googlesource.com/chromiumos/chromite'
 DEPOT_TOOLS_URL = ('https://chromium.googlesource.com/chromium/tools/'
                    'depot_tools.git')
-BUILDBOT_GIT_REPO = ('https://chrome-internal.googlesource.com/chrome/tools/'
-                     'build/internal.DEPS')
+CHROME_INFRA_SLAVE_REPO = ('https://chrome-internal.googlesource.com/chrome/'
+                           'tools/build/internal.DEPS.git')
+CHROME_INFRA_SLAVE_DEPS_FILE = '.DEPS.git'
+
 CHROMIUM_BUILD_URL = 'https://chromium.googlesource.com/chromium/src/build'
 GCOMPUTE_TOOLS_URL = 'https://gerrit.googlesource.com/gcompute-tools'
 
diff --git a/compute/compute_configs.py b/compute/compute_configs.py
index 502db86..fece3b5 100644
--- a/compute/compute_configs.py
+++ b/compute/compute_configs.py
@@ -6,7 +6,7 @@
 
 from __future__ import print_function
 
-from chromite.cbuildbot import constants
+from chromite.cbuildbot import config_lib
 
 
 # Metadata keys to tag our GCE artifacts with.
@@ -23,7 +23,8 @@
 # TODO: We do not archive the official images to Google Storage yet
 # because the imaging creating process for this path does not allow
 # the rootfs to be larger than 10GB.
-GS_IMAGE_ARCHIVE_BASE_URL = '%s/gce-images' % constants.DEFAULT_ARCHIVE_BUCKET
+GS_IMAGE_ARCHIVE_BASE_URL = ('%s/gce-images' %
+                             config_lib.GetConfig().params.ARCHIVE_URL)
 IMAGE_SUFFIX = '.tar.gz'
 
 BOOT_DISK = '/dev/sda'
diff --git a/compute/setup_bot.py b/compute/setup_bot.py
index ff4e923..84d5fe9 100644
--- a/compute/setup_bot.py
+++ b/compute/setup_bot.py
@@ -191,7 +191,8 @@
     # `gclient` relies on depot_tools in $PATH, pass the extra
     # envinornment variable.
     path_env = '%s:%s' % (os.getenv('PATH'), tmp_depot_tools_path)
-    RunCommand(['gclient', 'config', bot_constants.BUILDBOT_GIT_REPO],
+    RunCommand(['gclient', 'config', bot_constants.CHROME_INFRA_SLAVE_REPO,
+                '--deps-file', bot_constants.CHROME_INFRA_SLAVE_DEPS_FILE],
                cwd=bot_constants.BUILDBOT_DIR, extra_env={'PATH': path_env})
     RunCommand(['gclient', 'sync', '--jobs', '5'],
                cwd=bot_constants.BUILDBOT_DIR,
diff --git a/lib/blueprint_lib.py b/lib/blueprint_lib.py
deleted file mode 100644
index befe94c..0000000
--- a/lib/blueprint_lib.py
+++ /dev/null
@@ -1,159 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Utilities to work with blueprints."""
-
-from __future__ import print_function
-
-import os
-
-from chromite.lib import brick_lib
-from chromite.lib import workspace_lib
-
-
-# Field names for specifying initial configuration.
-APP_ID_FIELD = 'buildTargetId'
-BRICKS_FIELD = 'bricks'
-BSP_FIELD = 'bsp'
-
-# Those packages are implicitly built for all blueprints.
-# - target-os is needed to build any image.
-# - target-os-dev and target-os-test are needed to build a developer friendly
-#   image. They should not be included in any production images.
-_IMPLICIT_PACKAGES = (
-    'virtual/target-os',
-    'virtual/target-os-dev',
-    'virtual/target-os-test',
-)
-
-
-class BlueprintNotFoundError(Exception):
-  """The blueprint does not exist."""
-
-
-class BlueprintCreationError(Exception):
-  """Blueprint creation failed."""
-
-
-class Blueprint(object):
-  """Encapsulates the interaction with a blueprint."""
-
-  def __init__(self, blueprint_loc, initial_config=None):
-    """Instantiates a blueprint object.
-
-    Args:
-      blueprint_loc: blueprint locator.  This can be a relative path to CWD, an
-        absolute path, or a relative path to the root of the workspace prefixed
-        with '//'.
-      initial_config: A dictionary of key-value pairs to seed a new blueprint
-        with if the specified blueprint doesn't already exist.
-
-    Raises:
-      BlueprintNotFoundError: No blueprint exists at |blueprint_loc| and no
-        |initial_config| was given to create a new one.
-      BlueprintCreationError: |initial_config| was specified but a file
-        already exists at |blueprint_loc|.
-    """
-    self._path = (workspace_lib.LocatorToPath(blueprint_loc)
-                  if workspace_lib.IsLocator(blueprint_loc) else blueprint_loc)
-    self._locator = workspace_lib.PathToLocator(self._path)
-
-    if initial_config is not None:
-      self._CreateBlueprintConfig(initial_config)
-
-    try:
-      self.config = workspace_lib.ReadConfigFile(self._path)
-    except IOError:
-      raise BlueprintNotFoundError('Blueprint %s not found.' % self._path)
-
-  @property
-  def path(self):
-    return self._path
-
-  @property
-  def locator(self):
-    return self._locator
-
-  def _CreateBlueprintConfig(self, config):
-    """Create an initial blueprint config file.
-
-    Converts all brick paths in |config| into locators then saves the
-    configuration file to |self._path|.
-
-    Currently fails if |self._path| already exists, but could be
-    generalized to allow re-writing config files if needed.
-
-    Args:
-      config: configuration dictionary.
-
-    Raises:
-      BlueprintCreationError: A brick in |config| doesn't exist or an
-        error occurred while saving the config file.
-    """
-    if os.path.exists(self._path):
-      raise BlueprintCreationError('File already exists at %s.' % self._path)
-
-    try:
-      # Turn brick specifications into locators. If bricks or BSPs are
-      # unspecified, assign default values so the config file has the proper
-      # structure for easy manual editing.
-      if config.get(BRICKS_FIELD):
-        config[BRICKS_FIELD] = [brick_lib.Brick(b).brick_locator
-                                for b in config[BRICKS_FIELD]]
-      else:
-        config[BRICKS_FIELD] = []
-      if config.get(BSP_FIELD):
-        config[BSP_FIELD] = brick_lib.Brick(config[BSP_FIELD]).brick_locator
-      else:
-        config[BSP_FIELD] = None
-
-      # Create the config file.
-      workspace_lib.WriteConfigFile(self._path, config)
-    except (brick_lib.BrickNotFound, workspace_lib.ConfigFileError) as e:
-      raise BlueprintCreationError('Blueprint creation failed. %s' % e)
-
-  def GetBricks(self):
-    """Returns the bricks field of a blueprint."""
-    return self.config.get(BRICKS_FIELD, [])
-
-  def GetBSP(self):
-    """Returns the BSP field of a blueprint."""
-    return self.config.get(BSP_FIELD)
-
-  def GetAppId(self):
-    """Returns the APP_ID from a blueprint."""
-    app_id = self.config.get(APP_ID_FIELD)
-    return app_id
-
-  def FriendlyName(self):
-    """Returns the friendly name for this blueprint."""
-    return workspace_lib.LocatorToFriendlyName(self._locator)
-
-  def GetUsedBricks(self):
-    """Returns the set of bricks used by this blueprint."""
-    brick_map = {}
-    for top_brick in self.GetBricks() + [self.GetBSP()]:
-      for b in brick_lib.Brick(top_brick).BrickStack():
-        brick_map[b.brick_locator] = b
-
-    return brick_map.values()
-
-  def GetPackages(self, with_implicit=True):
-    """Returns the list of packages needed by this blueprint.
-
-    This includes the main packages for the bricks and the bsp of this
-    blueprint. We don't add the main packages of the bricks dependencies to
-    allow inheriting a brick without inheriting its required packages.
-
-    Args:
-      with_implicit: If True, include packages that are implicitly required by
-        the core system.
-    """
-    packages = []
-    for locator in self.GetBricks() + [self.GetBSP()]:
-      packages.extend(brick_lib.Brick(locator).MainPackages())
-
-    if with_implicit:
-      packages.extend(_IMPLICIT_PACKAGES)
-    return packages
diff --git a/lib/blueprint_lib_unittest b/lib/blueprint_lib_unittest
deleted file mode 120000
index 72196ce..0000000
--- a/lib/blueprint_lib_unittest
+++ /dev/null
@@ -1 +0,0 @@
-../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/blueprint_lib_unittest.py b/lib/blueprint_lib_unittest.py
deleted file mode 100644
index d4c2387..0000000
--- a/lib/blueprint_lib_unittest.py
+++ /dev/null
@@ -1,121 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Tests for the blueprint library."""
-
-from __future__ import print_function
-
-from chromite.lib import blueprint_lib
-from chromite.lib import brick_lib
-from chromite.lib import cros_test_lib
-from chromite.lib import osutils
-from chromite.lib import workspace_lib
-
-
-class BlueprintLibTest(cros_test_lib.WorkspaceTestCase):
-  """Unittest for blueprint_lib.py"""
-
-  def setUp(self):
-    self.CreateWorkspace()
-
-  def testBlueprint(self):
-    """Tests getting the basic blueprint getters."""
-    bricks = ['//foo', '//bar', '//baz']
-    for brick in bricks:
-      self.CreateBrick(brick)
-    self.CreateBrick('//bsp')
-    blueprint = self.CreateBlueprint(bricks=bricks, bsp='//bsp')
-    self.assertEqual(blueprint.GetBricks(), bricks)
-    self.assertEqual(blueprint.GetBSP(), '//bsp')
-
-  def testBlueprintNoBricks(self):
-    """Tests that blueprints without bricks return reasonable defaults."""
-    self.CreateBrick('//bsp2')
-    blueprint = self.CreateBlueprint(bsp='//bsp2')
-    self.assertEqual(blueprint.GetBricks(), [])
-    self.assertEqual(blueprint.GetBSP(), '//bsp2')
-
-  def testEmptyBlueprintFile(self):
-    """Tests that empty blueprints create the basic file structure."""
-    blueprint = self.CreateBlueprint()
-    file_contents = workspace_lib.ReadConfigFile(blueprint.path)
-
-    self.assertIn(blueprint_lib.BRICKS_FIELD, file_contents)
-    self.assertIn(blueprint_lib.BSP_FIELD, file_contents)
-
-  def testGetUsedBricks(self):
-    """Tests that we can list all the bricks used."""
-    brick_lib.Brick('//a', initial_config={'name':'a'})
-    brick_b = brick_lib.Brick('//b', initial_config={'name':'b'})
-    brick_c = brick_lib.Brick('//c',
-                              initial_config={'name':'c',
-                                              'dependencies': ['//b']})
-
-    blueprint = self.CreateBlueprint(name='foo.json',
-                                     bsp='//a', bricks=[brick_c.brick_locator])
-    self.assertEqual(3, len(blueprint.GetUsedBricks()))
-
-    # We sort out duplicates: c depends on b and b is explicitly listed in
-    # bricks too.
-    blueprint = self.CreateBlueprint(name='bar.json',
-                                     bsp='//a', bricks=[brick_c.brick_locator,
-                                                        brick_b.brick_locator])
-    self.assertEqual(3, len(blueprint.GetUsedBricks()))
-
-  def testGetPackages(self):
-    """Tests that we can get the needed packages for a given blueprint."""
-    self.CreateBrick('foo', main_package='app-misc/foopkg')
-    self.CreateBrick('bar', main_package='app-misc/barpkg')
-    self.CreateBrick('foobar', main_package='app-misc/foobarpkg',
-                     dependencies=['//foo', '//bar'])
-
-    self.CreateBrick('hello', main_package='app-misc/hello')
-
-    self.CreateBrick('mybsp', main_package='app-misc/bspbonjour')
-
-    blueprint = self.CreateBlueprint(name='//myblueprint',
-                                     bricks=['//hello', '//foobar'],
-                                     bsp='//mybsp')
-    packages = blueprint.GetPackages(with_implicit=False)
-    self.assertEqual(
-        set(('app-misc/foobarpkg', 'app-misc/hello', 'app-misc/bspbonjour')),
-        set(packages))
-
-    packages = blueprint.GetPackages(with_implicit=True)
-    self.assertTrue('virtual/target-os' in packages)
-    self.assertTrue('virtual/target-os-dev' in packages)
-    self.assertTrue('virtual/target-os-test' in packages)
-
-  def testBlueprintAlreadyExists(self):
-    """Tests creating a blueprint where one already exists."""
-    self.CreateBrick('//foo')
-    self.CreateBrick('//bar')
-    self.CreateBlueprint(name='//my_blueprint', bricks=['//foo'])
-    with self.assertRaises(blueprint_lib.BlueprintCreationError):
-      self.CreateBlueprint(name='//my_blueprint', bricks=['//bar'])
-    # Make sure the original blueprint is untouched.
-    self.assertEqual(['//foo'],
-                     blueprint_lib.Blueprint('//my_blueprint').GetBricks())
-
-  def testBlueprintBrickNotFound(self):
-    """Tests creating a blueprint with a non-existent brick fails."""
-    with self.assertRaises(blueprint_lib.BlueprintCreationError):
-      self.CreateBlueprint(name='//my_blueprint', bricks=['//none'])
-
-  def testBlueprintBSPNotFound(self):
-    """Tests creating a blueprint with a non-existent BSP fails."""
-    with self.assertRaises(blueprint_lib.BlueprintCreationError):
-      self.CreateBlueprint(name='//my_blueprint', bsp='//none')
-
-  def testBlueprintNotFound(self):
-    """Tests loading a non-existent blueprint file."""
-    with self.assertRaises(blueprint_lib.BlueprintNotFoundError):
-      blueprint_lib.Blueprint('//not/a/blueprint')
-
-  def testInvalidBlueprint(self):
-    """Tests loading an invalid blueprint file."""
-    path = workspace_lib.LocatorToPath('//invalid_file')
-    osutils.WriteFile(path, 'invalid contents')
-    with self.assertRaises(workspace_lib.ConfigFileError):
-      blueprint_lib.Blueprint(path)
diff --git a/lib/bootstrap_lib.py b/lib/bootstrap_lib.py
deleted file mode 100644
index 9734f2e..0000000
--- a/lib/bootstrap_lib.py
+++ /dev/null
@@ -1,101 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Common utilities used by the chromium/bootstrap scripts."""
-
-from __future__ import print_function
-
-import os
-
-from chromite.lib import cros_build_lib
-from chromite.lib import project_sdk
-from chromite.lib import workspace_lib
-
-
-# This is the subdirectory of the bootstrap, where we store SDKs.
-SDK_CHECKOUTS = 'sdk_checkouts'
-
-
-# This env is used to remember the bootstrap path in child processes.
-BOOTSTRAP_PATH_ENV = 'BRILLO_BOOTSTRAP_PATH'
-
-
-def FindBootstrapPath(save_to_env=False):
-  """Find the bootstrap directory.
-
-  This is only possible, if the process was initially launched from a bootstrap
-  environment, and isn't inside a chroot.
-
-  Args:
-    save_to_env: If true, preserve the bootstrap path in an ENV for child
-                 processes. Only intended for the brillo bootstrap wrapper.
-
-  Returns:
-   Path to root of bootstrap, or None.
-  """
-  # We never have access to bootstrap if we are inside the chroot.
-  if cros_build_lib.IsInsideChroot():
-    return None
-
-  # See if the path has already been determined, especially in a parent wrapper
-  # process.
-  env_path = os.environ.get(BOOTSTRAP_PATH_ENV)
-  if env_path:
-    return env_path
-
-  # Base the bootstrap location on our current location, and remember it.
-  new_path = os.path.realpath(os.path.join(
-      os.path.abspath(__file__), '..', '..'))
-
-  # No repo checkout is a valid bootstrap environment, because the bootstrap
-  # environment holds repo checkouts inside SDK_CHECKOUTS, and repos cannot
-  # exist inside other repos.
-  if project_sdk.FindRepoRoot(new_path):
-    return None
-
-  if save_to_env:
-    os.environ[BOOTSTRAP_PATH_ENV] = new_path
-
-  return new_path
-
-
-def ComputeSdkPath(bootstrap_path, version):
-  """What directory should an SDK be in.
-
-  Args:
-    bootstrap_path: Bootstrap root directory, or None.
-    version: Version of the SDK.
-
-  Returns:
-    Path in which SDK version should be stored, or None.
-  """
-  if bootstrap_path is None:
-    return None
-
-  return os.path.join(bootstrap_path, SDK_CHECKOUTS, version)
-
-
-def GetActiveSdkPath(bootstrap_path, workspace_path):
-  """Find the SDK Path associated with a given workspace.
-
-  Most code should use constants.SOURCE_ROOT instead.
-
-  Args:
-    bootstrap_path: Path directory of the bootstrap dir (FindBootstrapPath()).
-    workspace_path: Path directory of the workspace (FindWorkspacePath()).
-
-  Returns:
-    Path to root directory of SDK, if there is an active one, and it exists.
-  """
-  if bootstrap_path is None:
-    return None
-
-  version = workspace_lib.GetActiveSdkVersion(workspace_path)
-  if version is None:
-    return None
-
-  sdk_root = ComputeSdkPath(bootstrap_path, version)
-
-  # Returns None if there is no active SDK version, or if it's not installed.
-  return sdk_root if os.path.isdir(sdk_root) else None
diff --git a/lib/bootstrap_lib_unittest b/lib/bootstrap_lib_unittest
deleted file mode 120000
index 72196ce..0000000
--- a/lib/bootstrap_lib_unittest
+++ /dev/null
@@ -1 +0,0 @@
-../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/bootstrap_lib_unittest.py b/lib/bootstrap_lib_unittest.py
deleted file mode 100644
index a913533..0000000
--- a/lib/bootstrap_lib_unittest.py
+++ /dev/null
@@ -1,128 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Tests for the project_sdk library."""
-
-from __future__ import print_function
-
-import mock
-import os
-
-from chromite.cbuildbot import constants
-from chromite.lib import cros_build_lib
-from chromite.lib import cros_test_lib
-from chromite.lib import osutils
-from chromite.lib import bootstrap_lib
-from chromite.lib import project_sdk
-from chromite.lib import workspace_lib
-
-# pylint: disable=protected-access
-
-class ProjectSdkTest(cros_test_lib.WorkspaceTestCase):
-  """Unittest for bootstrap_lib.py"""
-
-  def setUp(self):
-    self.version = '1.2.3'
-
-    # Don't use "CreateBootstrap" since it mocks out the method we are testing.
-    self.bootstrap_path = os.path.join(self.tempdir, 'bootstrap')
-    self.CreateWorkspace()
-
-
-  @mock.patch.object(project_sdk, 'FindRepoRoot')
-  @mock.patch.object(cros_build_lib, 'IsInsideChroot')
-  def _RunFindBootstrapPath(self, env, repo, chroot,
-                            expected_path, expected_env,
-                            mock_chroot, mock_repo):
-
-    orig_env = os.environ.copy()
-
-    try:
-      # Setup the ENV as requested.
-      if env is not None:
-        os.environ[bootstrap_lib.BOOTSTRAP_PATH_ENV] = env
-      else:
-        os.environ.pop(bootstrap_lib.BOOTSTRAP_PATH_ENV, None)
-
-      # Setup mocks, as requested.
-      mock_repo.return_value = repo
-      mock_chroot.return_value = chroot
-
-      # Verify that ENV is modified, if save is False.
-      self.assertEqual(bootstrap_lib.FindBootstrapPath(), expected_path)
-      self.assertEqual(os.environ.get(bootstrap_lib.BOOTSTRAP_PATH_ENV), env)
-
-      # The test environment is fully setup, run the test.
-      self.assertEqual(bootstrap_lib.FindBootstrapPath(True), expected_path)
-      self.assertEqual(os.environ.get(bootstrap_lib.BOOTSTRAP_PATH_ENV),
-                       expected_env)
-
-    finally:
-      # Restore the ENV.
-      osutils.SetEnvironment(orig_env)
-
-
-  def testFindBootstrapPath(self):
-    real_result = constants.CHROMITE_DIR
-
-    # Test first call in a bootstrap env. Exact results not verified.
-    self._RunFindBootstrapPath(None, None, False,
-                               real_result, real_result)
-
-    # Test first call after bootstrap outside an SDK. Not an expected env.
-    self._RunFindBootstrapPath('/foo', None, False,
-                               '/foo', '/foo')
-
-    # Test first call after bootstrap inside an SDK.
-    self._RunFindBootstrapPath('/foo', '/', False,
-                               '/foo', '/foo')
-
-    # Test first call without bootstrap inside an SDK. Error Case.
-    self._RunFindBootstrapPath(None, '/', False,
-                               None, None)
-
-    # Test all InsideChroot Cases.
-    self._RunFindBootstrapPath(None, None, True,
-                               None, None)
-    self._RunFindBootstrapPath('/foo', None, True,
-                               None, '/foo')
-    self._RunFindBootstrapPath('/foo', '/', True,
-                               None, '/foo')
-    self._RunFindBootstrapPath(None, '/', True,
-                               None, None)
-
-  def testComputeSdkPath(self):
-    # Try to compute path, with no valid bootstrap path.
-    self.assertEqual(None, bootstrap_lib.ComputeSdkPath(None, '1.2.3'))
-
-    self.assertEqual(
-        '/foo/bootstrap/sdk_checkouts/1.2.3',
-        bootstrap_lib.ComputeSdkPath('/foo/bootstrap', '1.2.3'))
-
-  def testGetActiveSdkPath(self):
-    # Try to find SDK Path with no valid bootstrap path.
-    sdk_dir = bootstrap_lib.GetActiveSdkPath(None,
-                                             self.workspace_path)
-    self.assertEqual(None, sdk_dir)
-
-    # Try to find SDK Path of workspace with no active SDK.
-    sdk_dir = bootstrap_lib.GetActiveSdkPath(self.bootstrap_path,
-                                             self.workspace_path)
-    self.assertEqual(None, sdk_dir)
-
-    # Try to find SDK Path of workspace with active SDK, but SDK doesn't exist.
-    workspace_lib.SetActiveSdkVersion(self.workspace_path, self.version)
-    sdk_dir = bootstrap_lib.GetActiveSdkPath(self.bootstrap_path,
-                                             self.workspace_path)
-    self.assertEqual(None, sdk_dir)
-
-    # 'Create' the active SDK.
-    expected_sdk_root = bootstrap_lib.ComputeSdkPath(self.bootstrap_path,
-                                                     self.version)
-    osutils.SafeMakedirs(expected_sdk_root)
-
-    # Verify that we can Find it now.
-    sdk_dir = bootstrap_lib.GetActiveSdkPath(self.bootstrap_path,
-                                             self.workspace_path)
-    self.assertEqual(expected_sdk_root, sdk_dir)
diff --git a/lib/brick_lib.py b/lib/brick_lib.py
deleted file mode 100644
index 0925688..0000000
--- a/lib/brick_lib.py
+++ /dev/null
@@ -1,273 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Common brick related utilities."""
-
-from __future__ import print_function
-
-import os
-
-from chromite.lib import osutils
-from chromite.lib import workspace_lib
-
-_DEFAULT_LAYOUT_CONF = {'profile_eapi_when_unspecified': '5-progress',
-                        'profile-formats': 'portage-2 profile-default-eapi',
-                        'thin-manifests': 'true',
-                        'use-manifests': 'true'}
-
-_CONFIG_FILE = 'config.json'
-
-_IGNORED_OVERLAYS = ('portage-stable', 'chromiumos', 'eclass-overlay')
-
-
-class BrickCreationFailed(Exception):
-  """The brick creation failed."""
-
-
-class BrickNotFound(Exception):
-  """The brick does not exist."""
-
-
-class BrickFeatureNotSupported(Exception):
-  """Attempted feature not supported for this brick."""
-
-
-class Brick(object):
-  """Encapsulates the interaction with a brick."""
-
-  def __init__(self, brick_loc, initial_config=None, allow_legacy=True):
-    """Instantiates a brick object.
-
-    Args:
-      brick_loc: brick locator. This can be a relative path to CWD, an absolute
-        path, a public board name prefix with 'board:' or a relative path to the
-        root of the workspace, prefixed with '//').
-      initial_config: The initial configuration as a python dictionary.
-        If not None, creates a brick with this configuration.
-      allow_legacy: Allow board overlays, simulating a basic read-only config.
-        Ignored if |initial_config| is not None.
-
-    Raises:
-      ValueError: If |brick_loc| is invalid.
-      LocatorNotResolved: |brick_loc| is valid but could not be resolved.
-      BrickNotFound: If |brick_loc| does not point to a brick and no initial
-        config was provided.
-      BrickCreationFailed: when the brick could not be created successfully.
-    """
-    if workspace_lib.IsLocator(brick_loc):
-      self.brick_dir = workspace_lib.LocatorToPath(brick_loc)
-      self.brick_locator = brick_loc
-    else:
-      self.brick_dir = brick_loc
-      self.brick_locator = workspace_lib.PathToLocator(brick_loc)
-
-    self.config = None
-    self.legacy = False
-    config_json = os.path.join(self.brick_dir, _CONFIG_FILE)
-
-    if not os.path.exists(config_json):
-      if initial_config:
-        if os.path.exists(self.brick_dir):
-          raise BrickCreationFailed('directory %s already exists.'
-                                    % self.brick_dir)
-        success = False
-        try:
-          self.UpdateConfig(initial_config)
-          osutils.SafeMakedirs(self.OverlayDir())
-          osutils.SafeMakedirs(self.SourceDir())
-          success = True
-        except BrickNotFound as e:
-          # If BrickNotFound was raised, the dependencies contain a missing
-          # brick.
-          raise BrickCreationFailed('dependency not found %s' % e)
-        finally:
-          if not success:
-            # If the brick creation failed for any reason, cleanup the partially
-            # created brick.
-            osutils.RmDir(self.brick_dir, ignore_missing=True)
-
-      elif allow_legacy:
-        self.legacy = True
-        try:
-          masters = self._ReadLayoutConf().get('masters')
-          masters_list = masters.split() if masters else []
-
-          # Keep general Chromium OS overlays out of this list as they are
-          # handled separately by the build system.
-          deps = ['board:' + d for d in masters_list
-                  if d not in _IGNORED_OVERLAYS]
-          self.config = {'name': self._ReadLayoutConf()['repo-name'],
-                         'dependencies': deps}
-        except (IOError, KeyError):
-          pass
-
-      if self.config is None:
-        raise BrickNotFound('Brick not found at %s' % self.brick_dir)
-    elif initial_config is None:
-      self.config = workspace_lib.ReadConfigFile(config_json)
-    else:
-      raise BrickCreationFailed('brick %s already exists.' % self.brick_dir)
-
-    self.friendly_name = None
-    if not self.legacy:
-      self.friendly_name = workspace_lib.LocatorToFriendlyName(
-          self.brick_locator)
-
-  def _LayoutConfPath(self):
-    """Returns the path to the layout.conf file."""
-    return os.path.join(self.OverlayDir(), 'metadata', 'layout.conf')
-
-  def _WriteLayoutConf(self, content):
-    """Writes layout.conf.
-
-    Sets unset fields to a sensible default and write |content| in layout.conf
-    in the right format.
-
-    Args:
-      content: dictionary containing the set fields in layout.conf.
-    """
-    for k, v in _DEFAULT_LAYOUT_CONF.iteritems():
-      content.setdefault(k, v)
-
-    content_str = ''.join(['%s = %s\n' % (k, v)
-                           for k, v in content.iteritems()])
-    osutils.WriteFile(self._LayoutConfPath(), content_str, makedirs=True)
-
-  def _ReadLayoutConf(self):
-    """Returns the content of layout.conf as a Python dictionary."""
-    def ParseConfLine(line):
-      k, _, v = line.partition('=')
-      return k.strip(), v.strip() or None
-
-    content_str = osutils.ReadFile(self._LayoutConfPath())
-    return dict(ParseConfLine(line) for line in content_str.splitlines())
-
-  def UpdateConfig(self, config, regenerate=True):
-    """Updates the brick's configuration.
-
-    Writes |config| to the configuration file.
-    If |regenerate| is true, regenerate the portage configuration files in
-    this brick to match the new configuration.
-
-    Args:
-      config: brick configuration as a python dict.
-      regenerate: if True, regenerate autogenerated brick files.
-    """
-    if self.legacy:
-      raise BrickFeatureNotSupported(
-          'Cannot update configuration of legacy brick %s' % self.brick_dir)
-
-    self.config = config
-    # All objects must be unambiguously referenced. Normalize all the
-    # dependencies according to the workspace.
-    self.config['dependencies'] = [d if workspace_lib.IsLocator(d)
-                                   else workspace_lib.PathToLocator(d)
-                                   for d in self.config.get('dependencies', [])]
-
-    workspace_lib.WriteConfigFile(os.path.join(self.brick_dir, _CONFIG_FILE),
-                                  config)
-
-    if regenerate:
-      self.GeneratePortageConfig()
-
-  def GeneratePortageConfig(self):
-    """Generates all autogenerated brick files."""
-    # We don't generate anything in legacy brick so everything is up-to-date.
-    if self.legacy:
-      return
-
-    deps = [b.config['name'] for b in self.Dependencies()]
-
-    self._WriteLayoutConf(
-        {'masters': ' '.join(
-            ['eclass-overlay', 'portage-stable', 'chromiumos'] + deps),
-         'repo-name': self.config['name']})
-
-  def Dependencies(self):
-    """Returns the dependent bricks."""
-    return [Brick(d) for d in self.config.get('dependencies', [])]
-
-  def Inherits(self, brick_name):
-    """Checks whether this brick contains |brick_name|.
-
-    Args:
-      brick_name: The name of the brick to check containment.
-
-    Returns:
-      Whether |brick_name| is contained in this brick.
-    """
-    return brick_name in [b.config['name'] for b in self.BrickStack()]
-
-  def MainPackages(self):
-    """Returns the brick's main package(s).
-
-    This finds the 'main_package' property.  It nevertheless returns a (single
-    element) list as it is easier to work with.
-
-    Returns:
-      A list of main packages; empty if no main package configured.
-    """
-    main_package = self.config.get('main_package')
-    return [main_package] if main_package else []
-
-  def OverlayDir(self):
-    """Returns the brick's overlay directory."""
-    if self.legacy:
-      return self.brick_dir
-
-    return os.path.join(self.brick_dir, 'packages')
-
-  def SourceDir(self):
-    """Returns the project's source directory."""
-    return os.path.join(self.brick_dir, 'src')
-
-  def FriendlyName(self):
-    """Return the friendly name for this brick.
-
-    This name is used as the board name for legacy commands (--board).
-    """
-    if self.friendly_name is None:
-      raise BrickFeatureNotSupported()
-    return self.friendly_name
-
-  def BrickStack(self):
-    """Returns the brick stack for this brick.
-
-    Returns:
-      A list of bricks, respecting the partial ordering of bricks as defined by
-      dependencies, ordered from the lowest priority to the highest priority.
-    """
-    seen = set()
-    def _stack(brick):
-      seen.add(brick.brick_dir)
-      l = []
-      for dep in brick.Dependencies():
-        if dep.brick_dir not in seen:
-          l.extend(_stack(dep))
-      l.append(brick)
-      return l
-
-    return _stack(self)
-
-
-def FindBrickInPath(path=None):
-  """Returns the root directory of the brick containing a path.
-
-  Return the first parent directory of |path| that is the root of a brick.
-  This method is used for brick auto-detection and does not consider legacy.
-
-  Args:
-    path: path to a directory. If |path| is None, |path| will be set to CWD.
-
-  Returns:
-    The path to the first parent that is a brick directory if one exist.
-    Otherwise return None.
-  """
-  for p in osutils.IteratePathParents(path or os.getcwd()):
-    try:
-      return Brick(p, allow_legacy=False)
-    except BrickNotFound:
-      pass
-
-  return None
diff --git a/lib/brick_lib_unittest.py b/lib/brick_lib_unittest.py
deleted file mode 100644
index 9eff5b2..0000000
--- a/lib/brick_lib_unittest.py
+++ /dev/null
@@ -1,217 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Tests for the brick library."""
-
-from __future__ import print_function
-
-import os
-
-from chromite.cbuildbot import constants
-from chromite.lib import brick_lib
-from chromite.lib import cros_test_lib
-from chromite.lib import osutils
-from chromite.lib import workspace_lib
-
-
-class BrickLibTest(cros_test_lib.WorkspaceTestCase):
-  """Unittest for brick.py"""
-
-  # pylint: disable=protected-access
-
-  def setUp(self):
-    self.CreateWorkspace()
-
-  def SetupLegacyBrick(self, brick_dir=None, brick_name='foo'):
-    """Sets up a legacy brick layout."""
-    if brick_dir is None:
-      brick_dir = self.workspace_path
-    layout_conf = 'repo-name = %s\n' % brick_name
-    osutils.WriteFile(os.path.join(brick_dir, 'metadata', 'layout.conf'),
-                      layout_conf, makedirs=True)
-
-  def testLayoutFormat(self):
-    """Test that layout.conf is correctly formatted."""
-    brick = self.CreateBrick()
-    content = {'repo-name': 'hello',
-               'bar': 'foo'}
-    brick._WriteLayoutConf(content)
-
-    path = os.path.join(brick.OverlayDir(), 'metadata', 'layout.conf')
-    layout_conf = osutils.ReadFile(path).split('\n')
-
-    expected_lines = ['repo-name = hello',
-                      'bar = foo',
-                      'profile-formats = portage-2 profile-default-eapi']
-    for line in expected_lines:
-      self.assertTrue(line in layout_conf)
-
-  def testConfigurationGenerated(self):
-    """Test that portage's files are generated when the config file changes."""
-    brick = self.CreateBrick()
-    sample_config = {'name': 'hello',
-                     'dependencies': []}
-
-    brick.UpdateConfig(sample_config)
-
-    self.assertExists(brick._LayoutConfPath())
-
-  def testFindBrickInPath(self):
-    """Test that we can infer the current brick from the current directory."""
-    brick = self.CreateBrick()
-    os.remove(os.path.join(brick.brick_dir, brick_lib._CONFIG_FILE))
-    brick_dir = os.path.join(self.workspace_path, 'foo', 'bar', 'project')
-    expected_name = 'hello'
-    brick_lib.Brick(brick_dir, initial_config={'name': 'hello'})
-
-    with osutils.ChdirContext(self.workspace_path):
-      self.assertEqual(None, brick_lib.FindBrickInPath())
-
-    with osutils.ChdirContext(brick_dir):
-      self.assertEqual(expected_name,
-                       brick_lib.FindBrickInPath().config['name'])
-
-    subdir = os.path.join(brick_dir, 'sub', 'directory')
-    osutils.SafeMakedirs(subdir)
-    with osutils.ChdirContext(subdir):
-      self.assertEqual(expected_name,
-                       brick_lib.FindBrickInPath().config['name'])
-
-  def testBrickCreation(self):
-    """Test that brick initialization throws the right errors."""
-    brick = self.CreateBrick()
-    with self.assertRaises(brick_lib.BrickCreationFailed):
-      brick_lib.Brick(brick.brick_dir, initial_config={})
-
-    nonexistingbrick = os.path.join(self.workspace_path, 'foo')
-    with self.assertRaises(brick_lib.BrickNotFound):
-      brick_lib.Brick(nonexistingbrick)
-
-  def testLoadNonExistingBrickFails(self):
-    """Tests that trying to load a non-existing brick fails."""
-    self.assertRaises(brick_lib.BrickNotFound, brick_lib.Brick,
-                      self.workspace_path)
-
-  def testLoadExistingNormalBrickSucceeds(self):
-    """Tests that loading an existing brick works."""
-    brick = self.CreateBrick(name='my_brick')
-    brick = brick_lib.Brick(brick.brick_dir, allow_legacy=False)
-    self.assertEquals('my_brick', brick.config.get('name'))
-
-  def testLoadExistingLegacyBrickFailsIfNotAllowed(self):
-    """Tests that loading a legacy brick fails when not allowed."""
-    self.SetupLegacyBrick()
-    with self.assertRaises(brick_lib.BrickNotFound):
-      brick_lib.Brick(self.workspace_path, allow_legacy=False)
-
-  def testLoadExistingLegacyBrickSucceeds(self):
-    """Tests that loading a legacy brick fails when not allowed."""
-    self.SetupLegacyBrick()
-    brick = brick_lib.Brick(self.workspace_path)
-    self.assertEquals('foo', brick.config.get('name'))
-
-  def testLegacyBrickUpdateConfigFails(self):
-    """Tests that a legacy brick config cannot be updated."""
-    self.SetupLegacyBrick()
-    brick = brick_lib.Brick(self.workspace_path)
-    with self.assertRaises(brick_lib.BrickFeatureNotSupported):
-      brick.UpdateConfig({'name': 'bar'})
-
-  def testInherits(self):
-    """Tests the containment checking works as intended."""
-    saved_root = constants.SOURCE_ROOT
-
-    try:
-      # Mock the source root so that we can create fake legacy overlay.
-      constants.SOURCE_ROOT = self.workspace_path
-      legacy = os.path.join(self.workspace_path, 'src', 'overlays',
-                            'overlay-foobar')
-      self.SetupLegacyBrick(brick_dir=legacy, brick_name='foobar')
-
-      bar_brick = brick_lib.Brick('//bar', initial_config={'name': 'bar'})
-      foo_brick = brick_lib.Brick(
-          '//foo', initial_config={'name': 'foo',
-                                   'dependencies': ['//bar', 'board:foobar']})
-
-      self.assertTrue(bar_brick.Inherits('bar'))
-      self.assertTrue(foo_brick.Inherits('bar'))
-      self.assertFalse(bar_brick.Inherits('foo'))
-      self.assertTrue(foo_brick.Inherits('foobar'))
-      self.assertFalse(foo_brick.Inherits('dontexist'))
-
-    finally:
-      constants.SOURCE_ROOT = saved_root
-
-  def testOverlayDir(self):
-    """Tests that overlay directory is returned correctly."""
-    self.assertExists(self.CreateBrick().OverlayDir())
-
-  def testOpenUsingLocator(self):
-    """Tests that we can open a brick given a locator."""
-    brick_lib.Brick(os.path.join(self.workspace_path, 'foo'),
-                    initial_config={'name': 'foo'})
-
-    brick_lib.Brick('//foo')
-
-    with self.assertRaises(brick_lib.BrickNotFound):
-      brick_lib.Brick('//doesnotexist')
-
-  def testCreateUsingLocator(self):
-    """Tests that we can create a brick using a locator."""
-    brick_lib.Brick('//foobar', initial_config={'name': 'foobar'})
-    brick_lib.Brick('//bricks/some/path', initial_config={'name': 'path'})
-
-    brick_lib.Brick('//foobar')
-    brick_lib.Brick('//bricks/some/path')
-
-    brick_lib.Brick(os.path.join(self.workspace_path, 'foobar'))
-    brick_lib.Brick(os.path.join(self.workspace_path, 'bricks', 'some', 'path'))
-
-  def testFriendlyName(self):
-    """Tests that the friendly name generation works."""
-    first = brick_lib.Brick('//foo/bar/test', initial_config={'name': 'test'})
-    self.assertEqual('foo.bar.test', first.FriendlyName())
-
-    second = brick_lib.Brick(os.path.join(self.workspace_path, 'test', 'foo'),
-                             initial_config={'name': 'foo'})
-    self.assertEqual('test.foo', second.FriendlyName())
-
-  def testMissingDependency(self):
-    """Tests that the brick creation fails when a dependency is missing."""
-    with self.assertRaises(brick_lib.BrickCreationFailed):
-      brick_lib.Brick('//bar',
-                      initial_config={'name':'bar',
-                                      'dependencies':['//dont/exist']})
-
-    # If the creation failed, the directory is removed cleanly.
-    self.assertFalse(os.path.exists(workspace_lib.LocatorToPath('//bar')))
-
-  def testNormalizedDependencies(self):
-    """Tests that dependencies are normalized during brick creation."""
-    brick_lib.Brick('//foo/bar', initial_config={'name': 'bar'})
-    with osutils.ChdirContext(os.path.join(self.workspace_path, 'foo')):
-      brick_lib.Brick('//baz', initial_config={'name': 'baz',
-                                               'dependencies': ['bar']})
-
-    deps = brick_lib.Brick('//baz').config['dependencies']
-    self.assertEqual(1, len(deps))
-    self.assertEqual('//foo/bar', deps[0])
-
-  def testBrickStack(self):
-    """Tests that the brick stacking is correct."""
-    def brick_dep(name, deps):
-      config = {'name': os.path.basename(name),
-                'dependencies': deps}
-      return brick_lib.Brick(name, initial_config=config)
-
-    brick_dep('//first', [])
-    brick_dep('//second', ['//first'])
-    third = brick_dep('//third', ['//first', '//second'])
-    fourth = brick_dep('//fourth', ['//second', '//first'])
-
-    self.assertEqual(['//first', '//second', '//third'],
-                     [b.brick_locator for b in third.BrickStack()])
-
-    self.assertEqual(['//first', '//second', '//fourth'],
-                     [b.brick_locator for b in fourth.BrickStack()])
diff --git a/lib/chrome_util.py b/lib/chrome_util.py
index 1772db6..9c7a929 100644
--- a/lib/chrome_util.py
+++ b/lib/chrome_util.py
@@ -308,11 +308,6 @@
 _COPY_PATHS_COMMON = (
     Path('chrome_sandbox', mode=0o4755, dest=_CHROME_SANDBOX_DEST),
     Path('icudtl.dat'),
-    # Set as optional for backwards compatibility.
-    Path('lib/libpeerconnection.so',
-         exe=True,
-         cond=C.StagingFlagSet(_CHROME_INTERNAL_FLAG),
-         optional=True),
     Path('libffmpegsumo.so', exe=True, optional=True),
     Path('libosmesa.so', exe=True, optional=True),
     Path('libpdf.so', exe=True, optional=True),
diff --git a/lib/chroot_util.py b/lib/chroot_util.py
index 742e5b6..603cc3d 100644
--- a/lib/chroot_util.py
+++ b/lib/chroot_util.py
@@ -9,7 +9,6 @@
 import os
 
 from chromite.cbuildbot import constants
-from chromite.lib import brick_lib
 from chromite.lib import cros_build_lib
 from chromite.lib import cros_logging as logging
 from chromite.lib import sysroot_lib
@@ -148,34 +147,6 @@
   cros_build_lib.RunCommand(cmd)
 
 
-def InitializeSysroots(blueprint):
-  """Initialize the sysroots needed by |blueprint|.
-
-  Args:
-    blueprint: a blueprint_lib.Blueprint object.
-  """
-  bsp = brick_lib.Brick(blueprint.GetBSP())
-
-  # Create the brick stack.
-  # Removing duplicates while preserving a sane behaviour is hard:
-  # brbug.com/1029.
-  brick_stack = []
-  for brick_locator in blueprint.GetBricks():
-    brick_stack.extend(brick_lib.Brick(brick_locator).BrickStack())
-
-  # Regenerate the portage configuration for all bricks used by this blueprint.
-  for b in blueprint.GetUsedBricks():
-    b.GeneratePortageConfig()
-
-  sysroot_path = cros_build_lib.GetSysroot(blueprint.FriendlyName())
-
-  sysroot = sysroot_lib.Sysroot(sysroot_path)
-  sysroot.CreateSkeleton()
-  sysroot.WriteConfig(sysroot.GenerateBrickConfig(brick_stack, bsp))
-  sysroot.GeneratePortageConfig()
-  sysroot.UpdateToolchain()
-
-
 def RunUnittests(sysroot, packages, extra_env=None, verbose=False,
                  retries=None):
   """Runs the unit tests for |packages|.
diff --git a/lib/cidb.py b/lib/cidb.py
index b064572..0986c70 100644
--- a/lib/cidb.py
+++ b/lib/cidb.py
@@ -33,8 +33,9 @@
 CIDB_MIGRATIONS_DIR = os.path.join(constants.CHROMITE_DIR, 'cidb',
                                    'migrations')
 
-_RETRYABLE_OPERATIONAL_ERROR_CODES = (
+_RETRYABLE_OPERATIONAL_ERROR_CODES = frozenset([
     1053,   # 'Server shutdown in progress'
+    1205,   # 'Lock wait timeout exceeded; try restarting transaction'
     2003,   # 'Can't connect to MySQL server'
     2006,   # Error code 2006 'MySQL server has gone away' indicates that
             # the connection used was closed or dropped
@@ -44,7 +45,7 @@
             # whether the query completed before or after the connection
             # lost.
     2026,   # 'SSL connection error: unknown error number'
-)
+])
 
 
 def _IsRetryableException(e):
@@ -64,14 +65,23 @@
   # on the exception name. See crbug.com/483654
   if 'OperationalError' in str(type(e)):
     # Unwrap the error till we get to the error raised by the DB backend.
+    # Record each error_code that we encounter along the way.
     e_orig = e
-    while hasattr(e_orig, 'orig'):
-      e_orig = e_orig.orig
-    error_code = e_orig.args[0] if len(e_orig.args) > 0 else -1
-    if error_code in _RETRYABLE_OPERATIONAL_ERROR_CODES:
-      if error_code != 2006:
+    encountered_error_codes = set()
+    while e_orig:
+      if len(e_orig.args) and isinstance(e_orig.args[0], int):
+        encountered_error_codes.add(e_orig.args[0])
+      e_orig = getattr(e_orig, 'orig', None)
+
+    if encountered_error_codes & _RETRYABLE_OPERATIONAL_ERROR_CODES:
+      # Suppress logging of error code 2006 retries. They are routine and
+      # expected, and logging them confuses people.
+      if not 2006 in encountered_error_codes:
         logging.info('RETRYING cidb query due to %s.', e)
       return True
+    else:
+      logging.info('None of the error codes encountered %s are retryable.',
+                   encountered_error_codes)
 
   return False
 
@@ -323,7 +333,8 @@
     if self._meta is not None:
       return
     self._meta = MetaData()
-    self._meta.reflect(bind=self._GetEngine())
+    fn = lambda: self._meta.reflect(bind=self._GetEngine())
+    self._RunFunctorWithRetries(fn)
 
   def _Insert(self, table, values):
     """Create and execute a one-row INSERT query.
@@ -515,6 +526,10 @@
     f = lambda: engine.execute(query, *args, **kwargs)
     logging.info('Running cidb query on pid %s, repr(query) starts with %s',
                  os.getpid(), repr(query)[:100])
+    return self._RunFunctorWithRetries(f)
+
+  def _RunFunctorWithRetries(self, functor):
+    """Run the given |functor| with correct retry parameters."""
     return retry_stats.RetryWithStats(
         retry_stats.CIDB,
         handler=_IsRetryableException,
@@ -523,7 +538,7 @@
         backoff_factor=self.query_retry_args.backoff_factor,
         success_functor=_RetrySuccessHandler,
         raise_first_exception_on_failure=False,
-        functor=f)
+        functor=functor)
 
   def _GetEngine(self):
     """Get the sqlalchemy engine for this process.
@@ -565,6 +580,10 @@
       'SELECT c.id, b.id, action, c.reason, build_config, '
       'change_number, patch_number, change_source, timestamp FROM '
       'clActionTable c JOIN buildTable b ON build_id = b.id ')
+  _SQL_FETCH_MESSAGES = (
+      'SELECT build_id, build_config, waterfall, builder_name, build_number, '
+      'message_type, message_subtype, message_value, timestamp, board FROM '
+      'buildMessageTable c JOIN buildTable b ON build_id = b.id ')
   _DATE_FORMAT = '%Y-%m-%d'
 
   NUM_RESULTS_NO_LIMIT = -1
@@ -581,10 +600,10 @@
     """
     return self._Execute('SELECT NOW()').fetchall()[0][0]
 
-  @minimum_schema(32)
+  @minimum_schema(43)
   def InsertBuild(self, builder_name, waterfall, build_number,
                   build_config, bot_hostname, master_build_id=None,
-                  timeout_seconds=None):
+                  timeout_seconds=None, important=None):
     """Insert a build row.
 
     Args:
@@ -596,6 +615,7 @@
       master_build_id: (Optional) primary key of master build to this build.
       timeout_seconds: (Optional) If provided, total time allocated for this
           build. A deadline is recorded in cidb for the current build to end.
+      important: (Optional) If provided, the |important| value for this build.
     """
     values = {
         'builder_name': builder_name,
@@ -605,7 +625,9 @@
         'build_config': build_config,
         'bot_hostname': bot_hostname,
         'start_time': sqlalchemy.func.current_timestamp(),
-        'master_build_id': master_build_id}
+        'master_build_id': master_build_id,
+        'important': important,
+    }
     if timeout_seconds is not None:
       now = self.GetTime()
       duration = datetime.timedelta(seconds=timeout_seconds)
@@ -730,6 +752,34 @@
               'extra_info': extra_info}
     return self._Insert('failureTable', values)
 
+  @minimum_schema(42)
+  def InsertBuildMessage(self, build_id, message_type=None,
+                         message_subtype=None, message_value=None, board=None):
+    """Insert a build message into database.
+
+    Args:
+      build_id: primary key of build recording this message.
+      message_type: Optional str name of message type.
+      message_subtype: Optional str name of message subtype.
+      message_value: Optional value of message.
+      board: Optional str name of the board.
+    """
+    if message_type:
+      message_type = message_type[:240]
+    if message_subtype:
+      message_subtype = message_subtype[:240]
+    if message_value:
+      message_value = message_value[:480]
+    if board:
+      board = board[:240]
+
+    values = {'build_id': build_id,
+              'message_type': message_type,
+              'message_subtype': message_subtype,
+              'message_value': message_value,
+              'board': board}
+    return self._Insert('buildMessageTable', values)
+
   @minimum_schema(2)
   def UpdateMetadata(self, build_id, metadata):
     """Update the given metadata row in database.
@@ -750,7 +800,8 @@
                          'full_version': versions.get('full'),
                          'sdk_version': d.get('sdk-versions'),
                          'toolchain_url': d.get('toolchain-url'),
-                         'build_type': d.get('build_type')})
+                         'build_type': d.get('build_type'),
+                         'important': d.get('important')})
 
   @minimum_schema(32)
   def ExtendDeadline(self, build_id, timeout_seconds):
@@ -872,7 +923,7 @@
         (status, build_id, child_config))
 
 
-  @minimum_schema(2)
+  @minimum_schema(43)
   def GetBuildStatus(self, build_id):
     """Gets the status of the build.
 
@@ -886,7 +937,7 @@
     statuses = self.GetBuildStatuses([build_id])
     return statuses[0] if statuses else None
 
-  @minimum_schema(2)
+  @minimum_schema(43)
   def GetBuildStatuses(self, build_ids):
     """Gets the statuses of the builds.
 
@@ -896,17 +947,17 @@
     Returns:
       A list of dictionary with keys (id, build_config, start_time,
       finish_time, status, waterfall, build_number, builder_name,
-      platform_version, full_version), or None if no build with this
-      id was found.
+      platform_version, full_version, important), or None if no build
+      with this id was found.
     """
     return self._SelectWhere(
         'buildTable',
         'id IN (%s)' % ','.join(str(int(x)) for x in build_ids),
         ['id', 'build_config', 'start_time', 'finish_time', 'status',
          'waterfall', 'build_number', 'builder_name', 'platform_version',
-         'full_version'])
+         'full_version', 'important'])
 
-  @minimum_schema(2)
+  @minimum_schema(43)
   def GetSlaveStatuses(self, master_build_id):
     """Gets the statuses of slave builders to given build.
 
@@ -916,12 +967,12 @@
 
     Returns:
       A list containing, for each slave build (row) found, a dictionary
-      with keys (id, build_config, start_time, finish_time, status).
+      with keys (id, build_config, start_time, finish_time, status, important).
     """
     return self._SelectWhere('buildTable',
                              'master_build_id = %d' % master_build_id,
                              ['id', 'build_config', 'start_time',
-                              'finish_time', 'status'])
+                              'finish_time', 'status', 'important'])
 
   @minimum_schema(30)
   def GetSlaveStages(self, master_build_id):
@@ -946,6 +997,32 @@
     columns = bs_table_columns + ['build_config']
     return [dict(zip(columns, values)) for values in results]
 
+  @minimum_schema(44)
+  def GetSlaveFailures(self, master_build_id):
+    """Gets the failure entries for slave builds to given build.
+
+    Args:
+      master_build_id: build id of the master build to fetch failures
+                       for.
+
+    Returns:
+      A list containing, for each failure entry, a dictionary with keys
+      (id, build_stage_id, outer_failure_id, exception_type, exception_message,
+       exception_category, extra_info, timestamp, stage_name, board,
+       stage_status, build_id, master_build_id, builder_name, waterfall,
+       build_number, build_config, build_status, important).
+    """
+    columns = ['id', 'build_stage_id', 'outer_failure_id', 'exception_type',
+               'exception_message', 'exception_category', 'extra_info',
+               'timestamp', 'stage_name', 'board', 'stage_status', 'build_id',
+               'master_build_id', 'builder_name', 'waterfall', 'build_number',
+               'build_config', 'build_status', 'important']
+    columns_string = ', '.join(columns)
+    results = self._Execute('SELECT %s FROM failureView '
+                            'WHERE master_build_id = %s ' %
+                            (columns_string, master_build_id)).fetchall()
+    return [dict(zip(columns, values)) for values in results]
+
   @minimum_schema(32)
   def GetTimeToDeadline(self, build_id):
     """Gets the time remaining till the deadline for given build_id.
@@ -978,7 +1055,7 @@
     deadline_past = (r[0][0] == 0)
     return 0 if deadline_past else abs(time_remaining.total_seconds())
 
-  @minimum_schema(2)
+  @minimum_schema(43)
   def GetBuildHistory(self, build_config, num_results,
                       ignore_build_id=None, start_date=None, end_date=None,
                       starting_build_number=None):
@@ -1006,11 +1083,12 @@
       A sorted list of dicts containing up to |number| dictionaries for
       build statuses in descending order. Valid keys in the dictionary are
       [id, build_config, buildbot_generation, waterfall, build_number,
-      start_time, finish_time, platform_version, full_version, status].
+      start_time, finish_time, platform_version, full_version, status,
+      important].
     """
     columns = ['id', 'build_config', 'buildbot_generation', 'waterfall',
                'build_number', 'start_time', 'finish_time', 'platform_version',
-               'full_version', 'status']
+               'full_version', 'status', 'important']
 
     where_clauses = ['build_config = "%s"' % build_config]
     if start_date is not None:
@@ -1149,6 +1227,43 @@
     results = self._Execute('SELECT k, v FROM keyvalTable').fetchall()
     return dict(results)
 
+  @minimum_schema(42)
+  def GetBuildMessages(self, build_id):
+    """Gets build messages from buildMessageTable.
+
+    Args:
+      build_id: The build to get messages for.
+
+    Returns:
+      A list of build message dictionaries, where each dictionary contains
+      keys build_id, build_config, builder_name, build_number, message_type,
+      message_subtype, message_value, timestamp, board.
+    """
+    return self._GetBuildMessagesWithClause('build_id = %s' % build_id)
+
+  @minimum_schema(42)
+  def GetSlaveBuildMessages(self, master_build_id):
+    """Gets build messages from buildMessageTable.
+
+    Args:
+      master_build_id: The build to get all slave messages for.
+
+    Returns:
+      A list of build message dictionaries, where each dictionary contains
+      keys build_id, build_config, waterfall, builder_name, build_number,
+      message_type, message_subtype, message_value, timestamp, board.
+    """
+    return self._GetBuildMessagesWithClause(
+        'master_build_id = %s' % master_build_id)
+
+  def _GetBuildMessagesWithClause(self, clause):
+    """Private helper method for fetching build messages."""
+    columns = ['build_id', 'build_config', 'waterfall', 'builder_name',
+               'build_number', 'message_type', 'message_subtype',
+               'message_value', 'timestamp', 'board']
+    results = self._Execute('%s WHERE %s' % (self._SQL_FETCH_MESSAGES,
+                                             clause)).fetchall()
+    return [dict(zip(columns, values)) for values in results]
 
 def _INV():
   raise AssertionError('CIDB connection factory has been invalidated.')
diff --git a/lib/cidb_integration_test.py b/lib/cidb_integration_test.py
index 121a0d8..3a57316 100644
--- a/lib/cidb_integration_test.py
+++ b/lib/cidb_integration_test.py
@@ -169,6 +169,11 @@
 
   def testWaterfallMigration(self):
     """Test that migrating waterfall from enum to varchar preserves value."""
+    self.skipTest('Skipped obsolete waterfall migration test.')
+    # This test no longer runs. It was used only to confirm the correctness of
+    # migration #41. In #43, the InsertBuild API changes in a way that is not
+    # compatible with this test.
+    # The test code remains in place for demonstration purposes only.
     db = self._PrepareFreshDatabase(40)
     build_id = db.InsertBuild('my builder', 'chromiumos', _random(),
                               'my config', 'my bot hostname')
@@ -195,6 +200,58 @@
     current_db_time = db.GetTime()
     self.assertEqual(type(current_db_time), datetime.datetime)
 
+  def testBuildMessages(self):
+    db = self._PrepareFreshDatabase(45)
+    self.assertEqual([], db.GetBuildMessages(1))
+    master_build_id = db.InsertBuild('builder name',
+                                     constants.WATERFALL_TRYBOT,
+                                     1,
+                                     'master',
+                                     'hostname')
+    slave_build_id = db.InsertBuild('slave builder name',
+                                    constants.WATERFALL_TRYBOT,
+                                    2,
+                                    'slave',
+                                    'slave hostname',
+                                    master_build_id=master_build_id)
+    db.InsertBuildMessage(master_build_id)
+    db.InsertBuildMessage(master_build_id, 'message_type', 'message_subtype',
+                          'message_value', 'board')
+    for i in range(10):
+      db.InsertBuildMessage(slave_build_id,
+                            'message_type', 'message_subtype', str(i), 'board')
+
+    master_messages = db.GetBuildMessages(master_build_id)
+    slave_messages = db.GetSlaveBuildMessages(master_build_id)
+
+    self.assertEqual(2, len(master_messages))
+    self.assertEqual(10, len(slave_messages))
+
+    mm2 = master_messages[1]
+    mm2.pop('timestamp')
+    self.assertEqual({'build_id': master_build_id,
+                      'build_config': 'master',
+                      'waterfall': constants.WATERFALL_TRYBOT,
+                      'builder_name': 'builder name',
+                      'build_number': 1L,
+                      'message_type': 'message_type',
+                      'message_subtype': 'message_subtype',
+                      'message_value': 'message_value',
+                      'board': 'board'},
+                     mm2)
+    sm10 = slave_messages[9]
+    sm10.pop('timestamp')
+    self.assertEqual({'build_id': slave_build_id,
+                      'build_config': 'slave',
+                      'waterfall': constants.WATERFALL_TRYBOT,
+                      'builder_name': 'slave builder name',
+                      'build_number': 2L,
+                      'message_type': 'message_type',
+                      'message_subtype': 'message_subtype',
+                      'message_value': '9',
+                      'board': 'board'},
+                     sm10)
+
   def testGetKeyVals(self):
     db = self._PrepareFreshDatabase(40)
     # In production we would never insert into this table from a bot, but for
@@ -228,9 +285,9 @@
 class DataSeries0Test(CIDBIntegrationTest):
   """Simulate a set of 630 master/slave CQ builds."""
 
-  def testCQWithSchema39(self):
-    """Run the CQ test with schema version 39."""
-    self._PrepareFreshDatabase(39)
+  def testCQWithSchema44(self):
+    """Run the CQ test with schema version 44."""
+    self._PrepareFreshDatabase(44)
     self._runCQTest()
 
   def _runCQTest(self):
@@ -416,7 +473,8 @@
 
       def simulate_slave(slave_metadata):
         build_id = _SimulateBuildStart(db, slave_metadata,
-                                       master_build_id)
+                                       master_build_id,
+                                       important=True)
         _SimulateCQBuildFinish(db, slave_metadata, build_id)
         logging.debug('Simulated slave build %s on pid %s', build_id,
                       os.getpid())
@@ -488,6 +546,12 @@
       bot_db.InsertFailure(build_stage_id, type(e).__name__, str(e), category)
       self.assertTrue(bot_db.HasBuildStageFailed(build_stage_id))
 
+    failures = bot_db.GetSlaveFailures(master_build_id)
+    self.assertEqual(len(failures),
+                     len(constants.EXCEPTION_CATEGORY_ALL_CATEGORIES))
+    for f in failures:
+      self.assertEqual(f['build_id'], build_id)
+
     slave_stages = bot_db.GetSlaveStages(master_build_id)
     self.assertEqual(len(slave_stages), 1)
     self.assertEqual(slave_stages[0]['status'], 'pass')
@@ -568,7 +632,7 @@
   """Simulate a single set of canary builds."""
 
   def runTest(self):
-    """Simulate a single set of canary builds with database schema v28."""
+    """Simulate a single set of canary builds with database schema v44."""
     metadatas = GetTestDataSeries(SERIES_1_TEST_DATA_PATH)
     self.assertEqual(len(metadatas), 18, 'Did not load expected amount of '
                                          'test data')
@@ -576,7 +640,7 @@
     # Migrate db to specified version. As new schema versions are added,
     # migrations to later version can be applied after the test builds are
     # simulated, to test that db contents are correctly migrated.
-    self._PrepareFreshDatabase(39)
+    self._PrepareFreshDatabase(44)
 
     bot_db = self.LocalCIDBConnection(self.CIDB_USER_BOT)
 
@@ -663,7 +727,7 @@
   return status
 
 
-def _SimulateBuildStart(db, metadata, master_build_id=None):
+def _SimulateBuildStart(db, metadata, master_build_id=None, important=None):
   """Returns build_id for the inserted buildTable entry."""
   metadata_dict = metadata.GetDict()
   # TODO(akeshet): We are pretending that all these builds were on the internal
@@ -677,7 +741,8 @@
                             metadata_dict['build-number'],
                             metadata_dict['bot-config'],
                             metadata_dict['bot-hostname'],
-                            master_build_id)
+                            master_build_id,
+                            important=important)
 
   return build_id
 
diff --git a/lib/cidb_setup_unittest b/lib/cidb_setup_unittest
deleted file mode 120000
index 72196ce..0000000
--- a/lib/cidb_setup_unittest
+++ /dev/null
@@ -1 +0,0 @@
-../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/cidb_setup_unittest.py b/lib/cidb_setup_unittest.py
deleted file mode 100644
index 680dfbd..0000000
--- a/lib/cidb_setup_unittest.py
+++ /dev/null
@@ -1,120 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Unit tests for cidb.py Setup methods."""
-
-from __future__ import print_function
-
-from chromite.cbuildbot import constants
-from chromite.lib import cidb
-from chromite.lib import cros_test_lib
-from chromite.lib import factory
-
-
-class CIDBConnectionFactoryTest(cros_test_lib.MockTestCase):
-  """Test that CIDBConnectionFactory behaves as expected."""
-
-  def setUp(self):
-    # Ensure that we do not create any live connections in this unit test.
-    self.connection_mock = self.PatchObject(cidb, 'CIDBConnection')
-    # pylint: disable=W0212
-    cidb.CIDBConnectionFactory._ClearCIDBSetup()
-
-  def tearDown(self):
-    # pylint: disable=protected-access
-    cidb.CIDBConnectionFactory._ClearCIDBSetup()
-
-  def testGetConnectionBeforeSetup(self):
-    """Calling GetConnection before Setup should raise exception."""
-    self.assertRaises(factory.ObjectFactoryIllegalOperation,
-                      cidb.CIDBConnectionFactory.GetCIDBConnectionForBuilder)
-
-  def testSetupProd(self):
-    """Test that SetupProd behaves as expected."""
-    cidb.CIDBConnectionFactory.SetupProdCidb()
-    cidb.CIDBConnectionFactory.GetCIDBConnectionForBuilder()
-
-    # Expected constructor call
-    self.connection_mock.assert_called_once_with(constants.CIDB_PROD_BOT_CREDS)
-    self.assertTrue(cidb.CIDBConnectionFactory.IsCIDBSetup())
-    self.assertRaises(factory.ObjectFactoryIllegalOperation,
-                      cidb.CIDBConnectionFactory.SetupProdCidb)
-    self.assertRaises(factory.ObjectFactoryIllegalOperation,
-                      cidb.CIDBConnectionFactory.SetupDebugCidb)
-    self.assertRaises(factory.ObjectFactoryIllegalOperation,
-                      cidb.CIDBConnectionFactory.SetupMockCidb)
-    self.assertRaises(factory.ObjectFactoryIllegalOperation,
-                      cidb.CIDBConnectionFactory.SetupNoCidb)
-
-  def testSetupDebug(self):
-    """Test that SetupDebug behaves as expected."""
-    cidb.CIDBConnectionFactory.SetupDebugCidb()
-    cidb.CIDBConnectionFactory.GetCIDBConnectionForBuilder()
-
-    # Expected constructor call
-    self.connection_mock.assert_called_once_with(constants.CIDB_DEBUG_BOT_CREDS)
-    self.assertTrue(cidb.CIDBConnectionFactory.IsCIDBSetup())
-    self.assertRaises(factory.ObjectFactoryIllegalOperation,
-                      cidb.CIDBConnectionFactory.SetupProdCidb)
-    self.assertRaises(factory.ObjectFactoryIllegalOperation,
-                      cidb.CIDBConnectionFactory.SetupDebugCidb)
-    self.assertRaises(factory.ObjectFactoryIllegalOperation,
-                      cidb.CIDBConnectionFactory.SetupMockCidb)
-    self.assertRaises(factory.ObjectFactoryIllegalOperation,
-                      cidb.CIDBConnectionFactory.SetupNoCidb)
-
-  def testInvalidateSetup(self):
-    """Test that cidb connection can be invalidated."""
-    cidb.CIDBConnectionFactory.SetupProdCidb()
-    cidb.CIDBConnectionFactory.InvalidateCIDBSetup()
-    self.assertRaises(AssertionError,
-                      cidb.CIDBConnectionFactory.GetCIDBConnectionForBuilder)
-
-  def testSetupMock(self):
-    """Test that SetupMock behaves as expected."""
-    # Set the CIDB to mock mode, but without supplying a mock
-    cidb.CIDBConnectionFactory.SetupMockCidb()
-
-    # Calls to non-mock Setup methods should fail.
-    self.assertRaises(factory.ObjectFactoryIllegalOperation,
-                      cidb.CIDBConnectionFactory.SetupProdCidb)
-    self.assertRaises(factory.ObjectFactoryIllegalOperation,
-                      cidb.CIDBConnectionFactory.SetupDebugCidb)
-
-    # Now supply a mock.
-    a = object()
-    cidb.CIDBConnectionFactory.SetupMockCidb(a)
-    self.assertTrue(cidb.CIDBConnectionFactory.IsCIDBSetup())
-    self.assertEqual(cidb.CIDBConnectionFactory.GetCIDBConnectionForBuilder(),
-                     a)
-
-    # Mock object can be changed by future SetupMockCidb call.
-    b = object()
-    cidb.CIDBConnectionFactory.SetupMockCidb(b)
-    self.assertEqual(cidb.CIDBConnectionFactory.GetCIDBConnectionForBuilder(),
-                     b)
-
-    # Mock object can be cleared by future ClearMock call.
-    cidb.CIDBConnectionFactory.ClearMock()
-
-    # Calls to non-mock Setup methods should still fail.
-    self.assertRaises(factory.ObjectFactoryIllegalOperation,
-                      cidb.CIDBConnectionFactory.SetupProdCidb)
-    self.assertRaises(factory.ObjectFactoryIllegalOperation,
-                      cidb.CIDBConnectionFactory.SetupDebugCidb)
-
-  def testSetupNo(self):
-    """Test that SetupNoCidb behaves as expected."""
-    cidb.CIDBConnectionFactory.SetupMockCidb()
-    cidb.CIDBConnectionFactory.SetupNoCidb()
-    cidb.CIDBConnectionFactory.SetupNoCidb()
-    self.assertTrue(cidb.CIDBConnectionFactory.IsCIDBSetup())
-    self.assertEqual(cidb.CIDBConnectionFactory.GetCIDBConnectionForBuilder(),
-                     None)
-    self.assertRaises(factory.ObjectFactoryIllegalOperation,
-                      cidb.CIDBConnectionFactory.SetupProdCidb)
-    self.assertRaises(factory.ObjectFactoryIllegalOperation,
-                      cidb.CIDBConnectionFactory.SetupDebugCidb)
-    self.assertRaises(factory.ObjectFactoryIllegalOperation,
-                      cidb.CIDBConnectionFactory.SetupMockCidb)
diff --git a/lib/cidb_unittest.py b/lib/cidb_unittest.py
index 9ff8056..f94a634 100644
--- a/lib/cidb_unittest.py
+++ b/lib/cidb_unittest.py
@@ -9,8 +9,10 @@
 import exceptions
 import sqlalchemy
 
+from chromite.cbuildbot import constants
 from chromite.lib import cidb
 from chromite.lib import cros_test_lib
+from chromite.lib import factory
 
 
 class RetryableOperationalError(exceptions.EnvironmentError):
@@ -50,3 +52,111 @@
         FatalOperationalError())))
     self.assertFalse(cidb._IsRetryableException(self._WrapError(
         UnknownError())))
+
+
+class CIDBConnectionFactoryTest(cros_test_lib.MockTestCase):
+  """Test that CIDBConnectionFactory behaves as expected."""
+
+  def setUp(self):
+    # Ensure that we do not create any live connections in this unit test.
+    self.connection_mock = self.PatchObject(cidb, 'CIDBConnection')
+    # pylint: disable=W0212
+    cidb.CIDBConnectionFactory._ClearCIDBSetup()
+
+  def tearDown(self):
+    # pylint: disable=protected-access
+    cidb.CIDBConnectionFactory._ClearCIDBSetup()
+
+  def testGetConnectionBeforeSetup(self):
+    """Calling GetConnection before Setup should raise exception."""
+    self.assertRaises(factory.ObjectFactoryIllegalOperation,
+                      cidb.CIDBConnectionFactory.GetCIDBConnectionForBuilder)
+
+  def testSetupProd(self):
+    """Test that SetupProd behaves as expected."""
+    cidb.CIDBConnectionFactory.SetupProdCidb()
+    cidb.CIDBConnectionFactory.GetCIDBConnectionForBuilder()
+
+    # Expected constructor call
+    self.connection_mock.assert_called_once_with(constants.CIDB_PROD_BOT_CREDS)
+    self.assertTrue(cidb.CIDBConnectionFactory.IsCIDBSetup())
+    self.assertRaises(factory.ObjectFactoryIllegalOperation,
+                      cidb.CIDBConnectionFactory.SetupProdCidb)
+    self.assertRaises(factory.ObjectFactoryIllegalOperation,
+                      cidb.CIDBConnectionFactory.SetupDebugCidb)
+    self.assertRaises(factory.ObjectFactoryIllegalOperation,
+                      cidb.CIDBConnectionFactory.SetupMockCidb)
+    self.assertRaises(factory.ObjectFactoryIllegalOperation,
+                      cidb.CIDBConnectionFactory.SetupNoCidb)
+
+  def testSetupDebug(self):
+    """Test that SetupDebug behaves as expected."""
+    cidb.CIDBConnectionFactory.SetupDebugCidb()
+    cidb.CIDBConnectionFactory.GetCIDBConnectionForBuilder()
+
+    # Expected constructor call
+    self.connection_mock.assert_called_once_with(constants.CIDB_DEBUG_BOT_CREDS)
+    self.assertTrue(cidb.CIDBConnectionFactory.IsCIDBSetup())
+    self.assertRaises(factory.ObjectFactoryIllegalOperation,
+                      cidb.CIDBConnectionFactory.SetupProdCidb)
+    self.assertRaises(factory.ObjectFactoryIllegalOperation,
+                      cidb.CIDBConnectionFactory.SetupDebugCidb)
+    self.assertRaises(factory.ObjectFactoryIllegalOperation,
+                      cidb.CIDBConnectionFactory.SetupMockCidb)
+    self.assertRaises(factory.ObjectFactoryIllegalOperation,
+                      cidb.CIDBConnectionFactory.SetupNoCidb)
+
+  def testInvalidateSetup(self):
+    """Test that cidb connection can be invalidated."""
+    cidb.CIDBConnectionFactory.SetupProdCidb()
+    cidb.CIDBConnectionFactory.InvalidateCIDBSetup()
+    self.assertRaises(AssertionError,
+                      cidb.CIDBConnectionFactory.GetCIDBConnectionForBuilder)
+
+  def testSetupMock(self):
+    """Test that SetupMock behaves as expected."""
+    # Set the CIDB to mock mode, but without supplying a mock
+    cidb.CIDBConnectionFactory.SetupMockCidb()
+
+    # Calls to non-mock Setup methods should fail.
+    self.assertRaises(factory.ObjectFactoryIllegalOperation,
+                      cidb.CIDBConnectionFactory.SetupProdCidb)
+    self.assertRaises(factory.ObjectFactoryIllegalOperation,
+                      cidb.CIDBConnectionFactory.SetupDebugCidb)
+
+    # Now supply a mock.
+    a = object()
+    cidb.CIDBConnectionFactory.SetupMockCidb(a)
+    self.assertTrue(cidb.CIDBConnectionFactory.IsCIDBSetup())
+    self.assertEqual(cidb.CIDBConnectionFactory.GetCIDBConnectionForBuilder(),
+                     a)
+
+    # Mock object can be changed by future SetupMockCidb call.
+    b = object()
+    cidb.CIDBConnectionFactory.SetupMockCidb(b)
+    self.assertEqual(cidb.CIDBConnectionFactory.GetCIDBConnectionForBuilder(),
+                     b)
+
+    # Mock object can be cleared by future ClearMock call.
+    cidb.CIDBConnectionFactory.ClearMock()
+
+    # Calls to non-mock Setup methods should still fail.
+    self.assertRaises(factory.ObjectFactoryIllegalOperation,
+                      cidb.CIDBConnectionFactory.SetupProdCidb)
+    self.assertRaises(factory.ObjectFactoryIllegalOperation,
+                      cidb.CIDBConnectionFactory.SetupDebugCidb)
+
+  def testSetupNo(self):
+    """Test that SetupNoCidb behaves as expected."""
+    cidb.CIDBConnectionFactory.SetupMockCidb()
+    cidb.CIDBConnectionFactory.SetupNoCidb()
+    cidb.CIDBConnectionFactory.SetupNoCidb()
+    self.assertTrue(cidb.CIDBConnectionFactory.IsCIDBSetup())
+    self.assertEqual(cidb.CIDBConnectionFactory.GetCIDBConnectionForBuilder(),
+                     None)
+    self.assertRaises(factory.ObjectFactoryIllegalOperation,
+                      cidb.CIDBConnectionFactory.SetupProdCidb)
+    self.assertRaises(factory.ObjectFactoryIllegalOperation,
+                      cidb.CIDBConnectionFactory.SetupDebugCidb)
+    self.assertRaises(factory.ObjectFactoryIllegalOperation,
+                      cidb.CIDBConnectionFactory.SetupMockCidb)
diff --git a/lib/cipd.py b/lib/cipd.py
new file mode 100644
index 0000000..427e027
--- /dev/null
+++ b/lib/cipd.py
@@ -0,0 +1,115 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module to download and run the CIPD client.
+
+CIPD is the Chrome Infra Package Deployer, a simple method of resolving
+package/architecture/version tuples into GStorage links and installing them.
+
+"""
+from __future__ import print_function
+
+import hashlib
+import json
+import os
+import pprint
+import urllib
+import urlparse
+
+import third_party.httplib2 as httplib2
+
+import chromite.lib.cros_logging as log
+from chromite.lib import cache
+from chromite.lib import osutils
+from chromite.lib import path_util
+
+
+# The version of CIPD to download.
+# TODO(phobbs) we could make a call to the 'resolveVersion' endpoint
+#   to resolve 'latest' into an instance_id for us.
+CIPD_INSTANCE_ID = '03f354ad7a6031c7924d9b69a85f83269cc3c2e0'
+CIPD_PACKAGE = 'infra/tools/cipd/linux-amd64'
+
+CHROME_INFRA_PACKAGES_API_BASE = (
+    'https://chrome-infra-packages.appspot.com/_ah/api/repo/v1/')
+
+
+def _ChromeInfraRequest(endpoint, request_args=None):
+  """Makes a request to the Chrome Infra Packages API with httplib2.
+
+  Args:
+    endpoint: The endpoint to make a request to.
+    request_args: Keyword arguments to put into the request string.
+
+  Returns:
+    A tuple of (headers, content) returned by the server. The body content is
+    assumed to be JSON.
+  """
+  uri = ''.join([CHROME_INFRA_PACKAGES_API_BASE,
+                 endpoint,
+                 '?',
+                 urllib.urlencode(request_args or {})])
+  result = httplib2.Http().request(uri=uri)
+  try:
+    return result[0], json.loads(result[1])
+  except Exception as e:
+    e.message = 'Encountered exception requesting "%s":\n' + e.message
+    raise
+
+
+def _DownloadCIPD(instance_id):
+  """Finds the CIPD download link and requests the binary.
+
+  The 'client' endpoint of the chrome infra packages API responds with a sha1
+  and a Google Storage link. After downloading the binary, we validate the sha1
+  of the response and return the binary.
+
+  Args:
+    instance_id: The version of CIPD to download.
+
+  Returns:
+    The CIPD binary as a string.
+  """
+  args = {'instance_id': instance_id, 'package_name': CIPD_PACKAGE}
+  _, body = _ChromeInfraRequest('client', request_args=args)
+  if not 'client_binary' in body:
+    log.error(
+        'Error requesting the link to download CIPD from. Got:\n%s',
+        pprint.pformat(body))
+
+  http = httplib2.Http(cache='.cache')
+  response, binary = http.request(uri=body['client_binary']['fetch_url'])
+  assert response['status'] == '200', (
+      'Got a %s response from Google Storage.' % response['status'])
+  digest = unicode(hashlib.sha1(binary).hexdigest())
+  assert digest == body['client_binary']['sha1'], (
+      'The binary downloaded does not match the expected SHA1.')
+  return binary
+
+
+class CipdCache(cache.RemoteCache):
+  """Supports caching of the CIPD download."""
+  def _Fetch(self, key, path):
+    instance_id = urlparse.urlparse(key).netloc
+    binary = _DownloadCIPD(instance_id)
+    log.info('Fetched CIPD package %s:%s', CIPD_PACKAGE, instance_id)
+    osutils.WriteFile(path, binary)
+    os.chmod(path, 0755)
+
+
+def GetCIPDFromCache(instance_id=CIPD_INSTANCE_ID):
+  """Checks the cache, downloading CIPD if it is missing.
+
+  Args:
+    instance_id: The version of CIPD to download. Default CIPD_INSTANCE_ID
+
+  Returns:
+    Path to the CIPD binary.
+  """
+  cache_dir = os.path.join(path_util.GetCacheDir(), 'cipd')
+  bin_cache = CipdCache(cache_dir)
+  key = (instance_id,)
+  ref = bin_cache.Lookup(key)
+  ref.SetDefault('cipd://' + instance_id)
+  return ref.path
diff --git a/lib/commandline.py b/lib/commandline.py
index 6d95514..d108244 100644
--- a/lib/commandline.py
+++ b/lib/commandline.py
@@ -29,7 +29,6 @@
 from chromite.lib import osutils
 from chromite.lib import path_util
 from chromite.lib import terminal
-from chromite.lib import workspace_lib
 
 
 DEVICE_SCHEME_FILE = 'file'
@@ -318,95 +317,6 @@
       raise ValueError('Unknown device scheme "%s" in "%s"' % (scheme, value))
 
 
-def NormalizeWorkspacePath(path, default_dir=None, extension=None):
-  """Normalize a workspace path.
-
-  Converts |path| into a locator and applies |default_dir| and/or
-  |extension| if specified.
-
-  Args:
-    path: Relative, absolute, or locator path in the CWD workspace.
-    default_dir: If |path| does not contain '/', prepend this
-      directory to the result.
-    extension: If |path| doesn't end in this extension, append this
-      extension to the result.
-
-  Returns:
-    Workspace locator corresponding to the modified |path|.
-
-  Raises:
-    ValueError: |path| isn't in the workspace.
-  """
-  if default_dir and '/' not in path:
-    path = os.path.join(default_dir, path)
-
-  if extension:
-    extension = '.' + extension
-    if os.path.splitext(path)[1] != extension:
-      path += extension
-
-  if workspace_lib.IsLocator(path):
-    return path
-
-  locator = workspace_lib.PathToLocator(path)
-  if not locator:
-    # argparse ignores exception messages; log it as well so the user sees it.
-    error_message = '%s is not in the current workspace.' % path
-    logging.error(error_message)
-    raise ValueError(error_message)
-  return locator
-
-
-def NormalizeBrickPath(path):
-  """Normalize a brick path using some common assumptions.
-
-  Makes the following changes to |path|:
-    1. Put non-paths in //bricks (e.g. foo -> //bricks/foo).
-    2. Convert to a workspace locator.
-
-  Args:
-    path: brick path.
-
-  Returns:
-    Locator to the brick.
-  """
-  return NormalizeWorkspacePath(path, default_dir='//bricks')
-
-
-def NormalizeBspPath(path):
-  """Normalize a BSP path using some common assumptions.
-
-  Makes the following changes to |path|:
-    1. Put non-paths in //bsps (e.g. foo -> //bsps/foo).
-    2. Convert to a workspace locator.
-
-  Args:
-    path: BSP path.
-
-  Returns:
-    Locator to the BSP.
-  """
-  return NormalizeWorkspacePath(path, default_dir='//bsps')
-
-
-def NormalizeBlueprintPath(path):
-  """Normalize a blueprint path using some common assumptions.
-
-  Makes the following changes to |path|:
-    1. Put non-paths in //blueprints (e.g. foo -> //blueprints/foo).
-    2. Add .json if not already present.
-    3. Convert to a workspace locator.
-
-  Args:
-    path: blueprint path.
-
-  Returns:
-    Locator to the blueprint.
-  """
-  return NormalizeWorkspacePath(path, default_dir='//blueprints',
-                                extension='json')
-
-
 VALID_TYPES = {
     'bool': ParseBool,
     'date': ParseDate,
@@ -414,10 +324,6 @@
     'gs_path': NormalizeGSPath,
     'local_or_gs_path': NormalizeLocalOrGSPath,
     'path_or_uri': NormalizeUri,
-    'blueprint_path': NormalizeBlueprintPath,
-    'brick_path': NormalizeBrickPath,
-    'bsp_path': NormalizeBspPath,
-    'workspace_path': NormalizeWorkspacePath,
 }
 
 
diff --git a/lib/commandline_unittest.py b/lib/commandline_unittest.py
index 5333676..22f13d6 100644
--- a/lib/commandline_unittest.py
+++ b/lib/commandline_unittest.py
@@ -240,81 +240,6 @@
                            hostname='foo_host')
 
 
-class NormalizeWorkspacePathTest(cros_test_lib.WorkspaceTestCase):
-  """Tests for NormalizeWorkspacePath() and associated functions."""
-
-  def setUp(self):
-    self.CreateWorkspace()
-    # By default set the CWD to be the workspace directory.
-    self.cwd_mock = self.PatchObject(os, 'getcwd')
-    self.cwd_mock.return_value = self.workspace_path
-
-  def _VerifyNormalized(self, path, expected, **kwargs):
-    """Verifies tests on NormalizeWorkspacePath().
-
-    Args:
-      path: Input path to test.
-      expected: Expected output.
-      kwargs: Keyword args for NormalizeWorkspacePath().
-    """
-    self.assertEqual(expected,
-                     commandline.NormalizeWorkspacePath(path, **kwargs))
-
-
-  def testLocatorConversion(self):
-    """Tests NormalizeWorkspacePath() conversion to a locator."""
-    # Relative paths.
-    self._VerifyNormalized('a', '//a')
-    self._VerifyNormalized('a/b', '//a/b')
-
-    # Absolute paths.
-    self._VerifyNormalized(os.path.join(self.workspace_path, 'a'), '//a')
-    self._VerifyNormalized(os.path.join(self.workspace_path, 'a', 'b'), '//a/b')
-
-    # Locators should be unchanged.
-    self._VerifyNormalized('//a', '//a')
-    self._VerifyNormalized('//a/b', '//a/b')
-
-    # Paths outside the workspace should fail.
-    for path in ('/', '..'):
-      with self.assertRaises(ValueError):
-        commandline.NormalizeWorkspacePath(path)
-
-  def testDefaultDir(self):
-    """Tests the default_dir parameter."""
-    self._VerifyNormalized('a', '//default/a', default_dir='//default')
-    self._VerifyNormalized('a/b', '//a/b', default_dir='//default')
-    self._VerifyNormalized('./a', '//a', default_dir='//default')
-
-  def testExtension(self):
-    """Tests the extension parameter."""
-    self._VerifyNormalized('a', '//a.txt', extension='txt')
-    self._VerifyNormalized('a.bin', '//a.bin.txt', extension='txt')
-    self._VerifyNormalized('a.txt', '//a.txt', extension='txt')
-
-  def testSpecificPaths(self):
-    """Tests normalizing brick/BSP/blueprint paths."""
-    self.assertEqual('//bricks/a', commandline.NormalizeBrickPath('a'))
-    self.assertEqual('//bsps/a', commandline.NormalizeBspPath('a'))
-    self.assertEqual('//blueprints/a.json',
-                     commandline.NormalizeBlueprintPath('a'))
-
-  def testParser(self):
-    """Tests adding these types to a parser."""
-    parser = commandline.ArgumentParser()
-    parser.add_argument('path', type='workspace_path')
-    parser.add_argument('brick', type='brick_path')
-    parser.add_argument('bsp', type='bsp_path')
-    parser.add_argument('blueprint', type='blueprint_path')
-
-    options = parser.parse_args(['my_path', 'my_brick', 'my_bsp',
-                                 'my_blueprint'])
-    self.assertEqual('//my_path', options.path)
-    self.assertEqual('//bricks/my_brick', options.brick)
-    self.assertEqual('//bsps/my_bsp', options.bsp)
-    self.assertEqual('//blueprints/my_blueprint.json', options.blueprint)
-
-
 class CacheTest(cros_test_lib.MockTempDirTestCase):
   """Test cache dir default / override functionality."""
 
diff --git a/lib/cros_build_lib.py b/lib/cros_build_lib.py
index 01d9652..eb937fa 100644
--- a/lib/cros_build_lib.py
+++ b/lib/cros_build_lib.py
@@ -958,6 +958,27 @@
   return rc_func(cmd, cwd=cwd, **kwargs)
 
 
+def GroupByKey(input_iter, key):
+  """Split an iterable of dicts, based on value of a key.
+
+  GroupByKey([{'a': 1}, {'a': 2}, {'a': 1, 'b': 2}], 'a') =>
+    {1: [{'a': 1}, {'a': 1, 'b': 2}], 2: [{'a': 2}]}
+
+  Args:
+    input_iter: An iterable of dicts.
+    key: A string specifying the key name to split by.
+
+  Returns:
+    A dictionary, mapping from each unique value for |key| that
+    was encountered in |input_iter| to a list of entries that had
+    that value.
+  """
+  split_dict = dict()
+  for entry in input_iter:
+    split_dict.setdefault(entry.get(key), []).append(entry)
+  return split_dict
+
+
 def GetInput(prompt):
   """Helper function to grab input from a user.   Makes testing easier."""
   return raw_input(prompt)
diff --git a/lib/cros_build_lib_unittest.py b/lib/cros_build_lib_unittest.py
index 87167ea..24c7507 100644
--- a/lib/cros_build_lib_unittest.py
+++ b/lib/cros_build_lib_unittest.py
@@ -1330,6 +1330,26 @@
     self.assertEqual(branches, ['refs/remotes/origin/release-R23-2913.B'])
 
 
+class TestGroupByKey(cros_test_lib.TestCase):
+  """Test GroupByKey."""
+
+  def testEmpty(self):
+    self.assertEqual({}, cros_build_lib.GroupByKey([], ''))
+
+  def testGroupByKey(self):
+    input_iter = [{'a': None, 'b': 0},
+                  {'a': 1, 'b': 1},
+                  {'a': 2, 'b': 2},
+                  {'a': 1, 'b': 3}]
+    expected_result = {
+        None: [{'a': None, 'b': 0}],
+        1:    [{'a': 1, 'b': 1},
+               {'a': 1, 'b': 3}],
+        2:    [{'a': 2, 'b': 2}]}
+    self.assertEqual(cros_build_lib.GroupByKey(input_iter, 'a'),
+                     expected_result)
+
+
 class Test_iflatten_instance(cros_test_lib.TestCase):
   """Test iflatten_instance function."""
 
diff --git a/lib/cros_test_lib.py b/lib/cros_test_lib.py
index e0a3d1a..d4b68c2 100644
--- a/lib/cros_test_lib.py
+++ b/lib/cros_test_lib.py
@@ -29,9 +29,6 @@
 
 from chromite.cbuildbot import config_lib
 from chromite.cbuildbot import constants
-from chromite.lib import blueprint_lib
-from chromite.lib import bootstrap_lib
-from chromite.lib import brick_lib
 from chromite.lib import cidb
 from chromite.lib import commandline
 from chromite.lib import cros_build_lib
@@ -46,7 +43,6 @@
 from chromite.lib import retry_util
 from chromite.lib import terminal
 from chromite.lib import timeout_util
-from chromite.lib import workspace_lib
 
 
 site_config = config_lib.GetConfig()
@@ -1569,128 +1565,6 @@
   """Convenience class mixing Logging and Mock."""
 
 
-class WorkspaceTestCase(MockTempDirTestCase):
-  """Test case that adds utilities for using workspaces."""
-
-  def setUp(self):
-    """Define variables populated below, mostly to make lint happy."""
-    self.bootstrap_path = None
-    self.mock_bootstrap_path = None
-
-    self.workspace_path = None
-    self.workspace_config = None
-    self.mock_workspace_path = None
-
-  def CreateBootstrap(self, sdk_version=None):
-    """Create a fake bootstrap directory in self.tempdir.
-
-    self.bootstrap_path points to new workspace path.
-    self.mock_bootstrap_path points to mock of FindBootstrapPath
-
-    Args:
-      sdk_version: Create a fake SDK version that's present in bootstrap.
-    """
-    # Create a bootstrap, inside our tempdir.
-    self.bootstrap_path = os.path.join(self.tempdir, 'bootstrap')
-    osutils.SafeMakedirs(os.path.join(self.bootstrap_path, '.git'))
-
-    # If a version is provided, fake it's existence in the bootstrap.
-    if sdk_version is not None:
-      sdk_path = bootstrap_lib.ComputeSdkPath(self.bootstrap_path, sdk_version)
-      osutils.SafeMakedirs(os.path.join(sdk_path, '.repo'))
-      osutils.SafeMakedirs(os.path.join(sdk_path, 'chromite', '.git'))
-
-    # Fake out bootstrap lookups to find this path.
-    self.mock_bootstrap_path = self.PatchObject(
-        bootstrap_lib, 'FindBootstrapPath', return_value=self.bootstrap_path)
-
-  def CreateWorkspace(self, sdk_version=None):
-    """Create a fake workspace directory in self.tempdir.
-
-    self.workspace_path points to new workspace path.
-    self.workspace_config points to workspace config file.
-    self.mock_workspace_path points to mock of WorkspacePath
-
-    Args:
-      sdk_version: Mark SDK version as active in workspace. Does NOT mean
-         it's present in bootstrap.
-    """
-    # Create a workspace, inside our tempdir.
-    self.workspace_path = os.path.join(self.tempdir, 'workspace')
-    self.workspace_config = os.path.join(
-        self.workspace_path,
-        workspace_lib.WORKSPACE_CONFIG)
-    osutils.Touch(self.workspace_config, makedirs=True)
-
-    # Define an SDK version for it, if needed.
-    if sdk_version is not None:
-      workspace_lib.SetActiveSdkVersion(self.workspace_path, sdk_version)
-
-    # Fake out workspace lookups to find this path.
-    self.mock_workspace_path = self.PatchObject(
-        workspace_lib, 'WorkspacePath', return_value=self.workspace_path)
-
-  def CreateBrick(self, name='thebrickfoo', main_package='category/bar',
-                  dependencies=None):
-    """Creates a new brick.
-
-    Args:
-      name: Brick name/path relative to the workspace root.
-      main_package: Main package to assign.
-      dependencies: List of bricks to depend on.
-
-    Returns:
-      The created Brick object.
-    """
-    brick_path = os.path.join(self.workspace_path, name)
-    config = {'name': name, 'main_package': main_package}
-    if dependencies:
-      config['dependencies'] = dependencies
-
-    return brick_lib.Brick(brick_path, initial_config=config)
-
-  def CreateBlueprint(self, name='theblueprintfoo.json', bsp=None, bricks=None,
-                      buildTargetId=None):
-    """Creates a new blueprint.
-
-    Args:
-      name: Blueprint name/path relative to the workspace root.
-      bsp: Path to BSP or None.
-      bricks: List of paths to bricks or None.
-      buildTargetId: The BuildTargetID to populate the APP_ID with or None.
-
-    Returns:
-      The created Blueprint object.
-    """
-    blueprint_path = os.path.join(self.workspace_path, name)
-
-    config = {}
-    if bricks:
-      config[blueprint_lib.BRICKS_FIELD] = bricks
-    if bsp:
-      config[blueprint_lib.BSP_FIELD] = bsp
-    if buildTargetId:
-      config[blueprint_lib.APP_ID_FIELD] = buildTargetId
-
-    return blueprint_lib.Blueprint(blueprint_path, initial_config=config)
-
-  def AssertBlueprintExists(self, name, bsp=None, bricks=None):
-    """Verifies a blueprint exists with the specified contents.
-
-    Args:
-      name: Blueprint name/path relative to the workspace root.
-      bsp: Expected blueprint BSP or None.
-      bricks: Expected blueprint bricks or None.
-    """
-    blueprint_path = os.path.join(self.workspace_path, name)
-    blueprint = blueprint_lib.Blueprint(blueprint_path)
-
-    if bsp is not None:
-      self.assertEqual(bsp, blueprint.GetBSP())
-    if bricks is not None:
-      self.assertListEqual(bricks, blueprint.GetBricks())
-
-
 @contextlib.contextmanager
 def SetTimeZone(tz):
   """Temporarily set the timezone to the specified value.
diff --git a/lib/cros_test_lib_unittest.py b/lib/cros_test_lib_unittest.py
index 68aad6c..5786a2c 100644
--- a/lib/cros_test_lib_unittest.py
+++ b/lib/cros_test_lib_unittest.py
@@ -12,14 +12,12 @@
 import time
 import unittest
 
-from chromite.lib import bootstrap_lib
 from chromite.lib import cros_test_lib
 from chromite.lib import cros_build_lib
 from chromite.lib import cros_build_lib_unittest
 from chromite.lib import osutils
 from chromite.lib import partial_mock
 from chromite.lib import timeout_util
-from chromite.lib import workspace_lib
 
 
 # pylint: disable=W0212,W0233
@@ -274,87 +272,3 @@
     # Verify that output is actually written to the correct files.
     self.assertEqual('foo\n', osutils.ReadFile(stdout_path))
     self.assertEqual('bar\n', osutils.ReadFile(stderr_path))
-
-
-class WorkspaceTestCaseTest(cros_test_lib.WorkspaceTestCase):
-  """Verification for WorkspaceTestCase."""
-
-  def testCreateWorkspace(self):
-    """Tests CreateWorkspace()."""
-    self.CreateWorkspace()
-    self.assertExists(self.workspace_path)
-    self.assertEqual(self.workspace_path, workspace_lib.WorkspacePath())
-
-  def testCreateWorkspaceSdk(self):
-    """Tests CreateWorkspace() with an SDK version."""
-    self.CreateWorkspace(sdk_version='1.2.3')
-    self.assertEqual('1.2.3',
-                     workspace_lib.GetActiveSdkVersion(self.workspace_path))
-
-  def testCreateBootstrap(self):
-    """Tests CreateBootstrap()."""
-    self.CreateBootstrap()
-    self.assertExists(self.bootstrap_path)
-    self.assertEqual(self.bootstrap_path, bootstrap_lib.FindBootstrapPath())
-
-  def testCreateBootstrapSdk(self):
-    """Tests CreateBootstrap() with an SDK version."""
-    self.CreateBootstrap(sdk_version='1.2.3')
-    self.assertExists(
-        bootstrap_lib.ComputeSdkPath(self.bootstrap_path, '1.2.3'))
-
-  def testCreateBrick(self):
-    """Tests CreateBrick()."""
-    self.CreateWorkspace()
-
-    self.CreateBrick(name='bar')
-    brick = self.CreateBrick(name='foo', main_package='category/bar',
-                             dependencies=['//bar'])
-    self.assertEqual(os.path.join(self.workspace_path, 'foo'), brick.brick_dir)
-    self.assertEqual('foo', brick.FriendlyName())
-    self.assertEqual(['category/bar'], brick.MainPackages())
-    self.assertEqual(['//bar'], [b.brick_locator for b in brick.Dependencies()])
-
-  def testCreateBlueprint(self):
-    """Tests CreateBlueprint()."""
-    brick_path = '//foo_brick'
-    bsp_path = '//foo_bsp'
-    blueprint_path = 'foo.json'
-
-    self.CreateWorkspace()
-    self.CreateBrick(brick_path)
-    self.CreateBrick(bsp_path)
-
-    blueprint = self.CreateBlueprint(name=blueprint_path, bsp=bsp_path,
-                                     bricks=[brick_path])
-    self.assertExists(os.path.join(self.workspace_path, blueprint_path))
-    self.assertEqual(bsp_path, blueprint.GetBSP())
-    self.assertEqual([brick_path], blueprint.GetBricks())
-
-  def testAssertBlueprintExists(self):
-    """Tests AssertBlueprintExists()."""
-    brick_path = '//foo_brick'
-    bsp_path = '//foo_bsp'
-    blueprint_path = 'foo.json'
-
-    self.CreateWorkspace()
-    self.CreateBrick(brick_path)
-    self.CreateBrick(bsp_path)
-    self.CreateBlueprint(name=blueprint_path, bsp=bsp_path, bricks=[brick_path])
-
-    # Test success conditions.
-    self.AssertBlueprintExists(blueprint_path)
-    self.AssertBlueprintExists(blueprint_path, bsp=bsp_path)
-    self.AssertBlueprintExists(blueprint_path, bricks=[brick_path])
-    self.AssertBlueprintExists(blueprint_path, bsp=bsp_path,
-                               bricks=[brick_path])
-
-    # Test failure conditions.
-    def TestFailure(blueprint_path, bsp=None, bricks=None):
-      with self.assertRaises(Exception):
-        self.AssertBlueprintExists(blueprint_path, bsp=bsp, bricks=bricks)
-
-    TestFailure('//no/blueprint')
-    TestFailure(blueprint_path, bsp='//no/bsp')
-    TestFailure(blueprint_path, bricks=['//no/brick'])
-    TestFailure(blueprint_path, bricks=[brick_path, '//no/brick'])
diff --git a/lib/factory_unittest.py b/lib/factory_unittest.py
index 3e0e77d..dba22ac 100644
--- a/lib/factory_unittest.py
+++ b/lib/factory_unittest.py
@@ -71,5 +71,3 @@
     a = self.of2.GetInstance()
     self.of2.Setup('t4', None)
     self.assertNotEqual(a, self.of2.GetInstance())
-
-
diff --git a/lib/fake_cidb.py b/lib/fake_cidb.py
index 0efb4e2..5cabeb7 100644
--- a/lib/fake_cidb.py
+++ b/lib/fake_cidb.py
@@ -44,7 +44,8 @@
 
   def InsertBuild(self, builder_name, waterfall, build_number,
                   build_config, bot_hostname, master_build_id=None,
-                  timeout_seconds=None, status=constants.BUILDER_STATUS_PASSED):
+                  timeout_seconds=None, status=constants.BUILDER_STATUS_PASSED,
+                  important=None):
     """Insert a build row.
 
     Note this API slightly differs from cidb as we pass status to avoid having
@@ -66,7 +67,9 @@
            'start_time': datetime.datetime.now(),
            'master_build_id' : master_build_id,
            'deadline': deadline,
-           'status': status}
+           'status': status,
+           'finish_time': datetime.datetime.now(),
+           'important': important}
     self.buildTable.append(row)
     return build_id
 
@@ -81,7 +84,8 @@
          'full_version': versions.get('full'),
          'sdk_version': d.get('sdk-versions'),
          'toolchain_url': d.get('toolchain-url'),
-         'build_type': d.get('build_type')})
+         'build_type': d.get('build_type'),
+         'important': d.get('important')})
     return 1
 
   def InsertCLActions(self, build_id, cl_actions, timestamp=None):
@@ -194,6 +198,11 @@
     """Gets the status of the builds."""
     return [self.buildTable[x -1] for x in build_ids]
 
+  def GetSlaveStatuses(self, master_build_id):
+    """Gets the slaves of given build."""
+    return [b for b in self.buildTable
+            if b['master_build_id'] == master_build_id]
+
   def GetBuildHistory(self, build_config, num_results,
                       ignore_build_id=None, start_date=None, end_date=None,
                       starting_build_number=None):
@@ -218,6 +227,7 @@
     if end_date is not None:
       build_configs = [b for b in build_configs
                        if 'finish_time' in b and
+                       b['finish_time'] and
                        b['finish_time'].date() <= end_date]
     if starting_build_number is not None:
       build_configs = [b for b in build_configs
diff --git a/lib/gerrit.py b/lib/gerrit.py
index 2a1155d..267ba2d 100644
--- a/lib/gerrit.py
+++ b/lib/gerrit.py
@@ -397,9 +397,9 @@
     gob_util.RestoreChange(self.host, self._to_changenum(change))
 
   def DeleteDraft(self, change, dryrun=False):
-    """Delete a draft patch set."""
+    """Delete a gerrit change iff all its revisions are drafts."""
     if dryrun:
-      logging.info('Would have deleted draft patch set %s', change)
+      logging.info('Would have deleted draft change %s', change)
       return
     gob_util.DeleteDraft(self.host, self._to_changenum(change))
 
diff --git a/lib/git.py b/lib/git.py
index 694dc80..b2ed600 100644
--- a/lib/git.py
+++ b/lib/git.py
@@ -67,6 +67,9 @@
 
     # crbug.com/451458, b/19202011
     r'repository cannot accept new pushes; contact support',
+
+    # crbug.com/535306
+    r'Service Temporarily Unavailable',
 )
 
 GIT_TRANSIENT_ERRORS_RE = re.compile('|'.join(GIT_TRANSIENT_ERRORS),
@@ -146,6 +149,7 @@
   """
   cmd = ['fsck', '--no-progress', '--no-dangling']
   try:
+    GarbageCollection(cwd)
     RunGit(cwd, cmd)
     return False
   except cros_build_lib.RunCommandError as ex:
diff --git a/lib/gob_util.py b/lib/gob_util.py
index 2fe6753..d1a6301 100644
--- a/lib/gob_util.py
+++ b/lib/gob_util.py
@@ -368,7 +368,7 @@
 
 
 def DeleteDraft(host, change):
-  """Delete a gerrit draft patch set."""
+  """Delete a gerrit draft change."""
   path = _GetChangePath(change)
   try:
     FetchUrl(host, path, reqtype='DELETE', ignore_204=True, ignore_404=False)
diff --git a/lib/graphite_lib/elasticsearch_mock_unittest.py b/lib/graphite_lib/elasticsearch_mock_unittest.py
index 59608fa..09edd60 100644
--- a/lib/graphite_lib/elasticsearch_mock_unittest.py
+++ b/lib/graphite_lib/elasticsearch_mock_unittest.py
@@ -44,4 +44,3 @@
       raise elasticsearch.ElasticsearchException('error message')
     except elasticsearch.ElasticsearchException:
       pass
-
diff --git a/lib/gs.py b/lib/gs.py
index 51c1752..7634b38 100644
--- a/lib/gs.py
+++ b/lib/gs.py
@@ -253,7 +253,7 @@
   # (1*sleep) the first time, then (2*sleep), continuing via attempt * sleep.
   DEFAULT_SLEEP_TIME = 60
 
-  GSUTIL_VERSION = '4.13'
+  GSUTIL_VERSION = '4.15'
   GSUTIL_TAR = 'gsutil_%s.tar.gz' % GSUTIL_VERSION
   GSUTIL_URL = (PUBLIC_BASE_HTTPS_URL +
                 'chromeos-mirror/gentoo/distfiles/%s' % GSUTIL_TAR)
@@ -956,15 +956,15 @@
       Assorted GSContextException exceptions.
     """
     try:
-      res = self.DoCommand(['stat', path], redirect_stdout=True, **kwargs)
+      res = self.DoCommand(['stat', '--', path], redirect_stdout=True, **kwargs)
     except GSCommandError as e:
       # Because the 'gsutil stat' command logs errors itself (instead of
       # raising errors internally like other commands), we have to look
-      # for errors ourselves.  See the bug report here:
+      # for errors ourselves.  See the related bug report here:
       # https://github.com/GoogleCloudPlatform/gsutil/issues/288
       # Example line:
-      # INFO 0713 05:58:12.451810 stat.py] No URLs matched gs://bucket/file
-      if re.match(r'INFO [ 0-9:.]* stat.py\] No URLs matched', e.result.error):
+      # No URLs matched gs://bucket/file
+      if e.result.error.startswith('No URLs matched'):
         raise GSNoSuchKey(path)
 
       # No idea what this is, so just choke.
diff --git a/lib/gs_unittest.py b/lib/gs_unittest.py
index af84d10..7d319d5 100644
--- a/lib/gs_unittest.py
+++ b/lib/gs_unittest.py
@@ -177,7 +177,7 @@
 
   def testBasic(self):
     """Simple test."""
-    self.gs_mock.AddCmdResult(['stat', self.GETSIZE_PATH],
+    self.gs_mock.AddCmdResult(['stat', '--', self.GETSIZE_PATH],
                               output=StatTest.STAT_OUTPUT)
     self.assertEqual(self.GetSize(), 74)
 
@@ -1009,11 +1009,11 @@
 
   def testGetGeneration(self):
     """Test ability to get the generation of a file."""
-    self.gs_mock.AddCmdResult(['stat', 'gs://abc/1'],
+    self.gs_mock.AddCmdResult(['stat', '--', 'gs://abc/1'],
                               output=StatTest.STAT_OUTPUT)
     ctx = gs.GSContext()
     ctx.GetGeneration('gs://abc/1')
-    self.gs_mock.assertCommandContains(['stat', 'gs://abc/1'])
+    self.gs_mock.assertCommandContains(['stat', '--', 'gs://abc/1'])
 
   def testCreateCached(self):
     """Test that the function runs through."""
@@ -1120,16 +1120,15 @@
       """
 
   # When stat throws an error.  It's a special snow flake.
-  STAT_ERROR_OUTPUT = ('INFO 0713 05:58:12.451810 stat.py] '
-                       'No URLs matched gs://abc/1')
+  STAT_ERROR_OUTPUT = 'No URLs matched gs://abc/1'
 
   def testStat(self):
     """Test ability to get the generation of a file."""
-    self.gs_mock.AddCmdResult(['stat', 'gs://abc/1'],
+    self.gs_mock.AddCmdResult(['stat', '--', 'gs://abc/1'],
                               output=self.STAT_OUTPUT)
     ctx = gs.GSContext()
     result = ctx.Stat('gs://abc/1')
-    self.gs_mock.assertCommandContains(['stat', 'gs://abc/1'])
+    self.gs_mock.assertCommandContains(['stat', '--', 'gs://abc/1'])
 
     self.assertEqual(result.creation_time,
                      datetime.datetime(2014, 8, 23, 6, 53, 20))
@@ -1143,11 +1142,11 @@
 
   def testStatOlderOutput(self):
     """Test ability to get the generation of a file."""
-    self.gs_mock.AddCmdResult(['stat', 'gs://abc/1'],
+    self.gs_mock.AddCmdResult(['stat', '--', 'gs://abc/1'],
                               output=self.STAT_OUTPUT_OLDER)
     ctx = gs.GSContext()
     result = ctx.Stat('gs://abc/1')
-    self.gs_mock.assertCommandContains(['stat', 'gs://abc/1'])
+    self.gs_mock.assertCommandContains(['stat', '--', 'gs://abc/1'])
 
     self.assertEqual(result.creation_time,
                      datetime.datetime(2014, 8, 23, 6, 53, 20))
@@ -1161,12 +1160,12 @@
 
   def testStatNoExist(self):
     """Test ability to get the generation of a file."""
-    self.gs_mock.AddCmdResult(['stat', 'gs://abc/1'],
+    self.gs_mock.AddCmdResult(['stat', '--', 'gs://abc/1'],
                               error=self.STAT_ERROR_OUTPUT,
                               returncode=1)
     ctx = gs.GSContext()
     self.assertRaises(gs.GSNoSuchKey, ctx.Stat, 'gs://abc/1')
-    self.gs_mock.assertCommandContains(['stat', 'gs://abc/1'])
+    self.gs_mock.assertCommandContains(['stat', '--', 'gs://abc/1'])
 
 
 class UnmockedStatTest(cros_test_lib.TempDirTestCase):
diff --git a/lib/launch_control/__init__.py b/lib/launch_control/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/lib/launch_control/__init__.py
diff --git a/bootstrap/scripts/brillo_unittest b/lib/launch_control/launch_control
similarity index 100%
rename from bootstrap/scripts/brillo_unittest
rename to lib/launch_control/launch_control
diff --git a/lib/launch_control/launch_control.py b/lib/launch_control/launch_control.py
new file mode 100644
index 0000000..da829cd
--- /dev/null
+++ b/lib/launch_control/launch_control.py
@@ -0,0 +1,142 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Select an Android build, and download symbols for it."""
+
+from __future__ import print_function
+
+import json
+import apiclient
+
+from httplib2 import Http
+from apiclient.discovery import build
+from oauth2client.client import SignedJwtAssertionCredentials
+
+from chromite.lib import commandline
+from chromite.lib import cros_logging as logging
+
+
+def OpenBuildApiProxy(json_key_file):
+  """Open an Android Internal Build API Apiary proxy.
+
+  Will NOT error out if authentication fails until the first real request is
+  made.
+
+  Args:
+    json_key_file: A Json key file to authenticate with. Retrieved from the
+                   Google Developer Console associated with the account to
+                   be used for the requests.
+
+  Returns:
+    Proxy object used to make requests against the API.
+  """
+  # Load the private key associated with the Google service account.
+  with open(json_key_file) as json_file:
+    json_data = json.load(json_file)
+    credentials = SignedJwtAssertionCredentials(
+        json_data['client_email'],
+        json_data['private_key'],
+        'https://www.googleapis.com/auth/androidbuild.internal')
+
+  # Open an authorized API proxy.
+  # See https://g3doc.corp.google.com/wireless/android/build_tools/
+  #         g3doc/public/build_data.md
+  http_auth = credentials.authorize(Http())
+  return build('androidbuildinternal', 'v2beta1', http=http_auth)
+
+
+def FindRecentBuildIds(build_api_proxy, branch, target):
+  """Fetch a list of successful completed build ids for a given branch/target.
+
+  This roughly matches the contents of the first page of build results on the
+  launch control website, except filtered for only successful/completed builds.
+
+  Since builds sometimes complete out of order, new builds can be added to the
+  list out of order.
+
+  Args:
+    build_api_proxy: Result of a previous call to OpenBuildApiProxy.
+    branch: Name of branch to search. Ex. 'git_mnc-dr-ryu-release'
+    target: Build target to search. Ex. 'ryu-userdebug'
+
+  Returns:
+    List of build_ids as integers.
+  """
+  result = build_api_proxy.build().list(
+      buildType='submitted',
+      branch=branch,
+      buildAttemptStatus='complete',
+      successful=True,
+      target=target,
+  ).execute()
+
+  # Extract the build_ids, arrange oldest to newest.
+  return sorted(int(b['buildId']) for b in result['builds'])
+
+
+def FetchBuildArtifact(build_api_proxy, build_id, target, resource_id,
+                       output_file):
+  """Fetch debug symbols associated with a given build.
+
+  Args:
+    build_api_proxy: Result of a previous call to OpenBuildApiProxy.
+    build_id: id of the build to fetch symbols for.
+    target: Build to target fetch symbols for. Ex. 'ryu-userdebug'
+    resource_id: Resource id to fetch. Ex. 'ryu-symbols-2282124.zip'
+    output_file: Path to where to write out the downloaded artifact.
+  """
+  # Open the download connection.
+  download_req = build_api_proxy.buildartifact().get_media(
+      buildId=build_id,
+      target=target,
+      attemptId='latest',
+      resourceId=resource_id)
+
+  # Download the symbols file contents.
+  with open(output_file, mode='wb') as fh:
+    downloader = apiclient.http.MediaIoBaseDownload(
+        fh, download_req, chunksize=20 * 1024 * 1024)
+    done = False
+    while not done:
+      _status, done = downloader.next_chunk()
+
+
+def main(argv):
+  """Command line wrapper for integration testing of the above library.
+
+  This library requires the ability to authenticate to an external service that
+  is restricted from the general public. So, allow manual integration testing by
+  users that have the necessary credentials.
+  """
+  parser = commandline.ArgumentParser(description=__doc__)
+
+  parser.add_argument('--json-key-file', type='path', required=True,
+                      help='Json key file for authenticating to service.')
+  parser.add_argument('--symbols-file', type='path', default='symbols.zip',
+                      help='Where to write symbols file out.')
+  parser.add_argument('--branch', type=str, default='git_mnc-dr-ryu-release',
+                      help='Branch to locate build for.')
+  parser.add_argument('--target', type=str, default='ryu-userdebug',
+                      help='Target to locate build for.')
+
+  opts = parser.parse_args(argv)
+  opts.Freeze()
+
+  build_proxy = OpenBuildApiProxy(opts.json_key_file)
+
+  build_ids = FindRecentBuildIds(
+      build_proxy,
+      branch=opts.branch,
+      target=opts.target)
+
+  build_id = build_ids[0]
+
+  # 'ryu-userdebug' -> 'ryu'
+  board = opts.target.split('-')[0]
+  # E.g. 'ryu-symbols-2282124.zip'
+  resource_id = '%s-symbols-%s.zip' % (board, build_id)
+
+  logging.info('Selected buildId: %s', build_id)
+  FetchBuildArtifact(build_proxy, build_id, opts.target, resource_id,
+                     opts.symbols_file)
diff --git a/lib/launch_control/processed_builds.py b/lib/launch_control/processed_builds.py
new file mode 100644
index 0000000..3e8f614
--- /dev/null
+++ b/lib/launch_control/processed_builds.py
@@ -0,0 +1,84 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Select an Android build, and download symbols for it."""
+
+from __future__ import print_function
+
+import json
+
+from chromite.lib import osutils
+
+
+class ProcessedBuildsStorage(object):
+  """A context manager for storing processed builds.
+
+  This is a context manager that loads recent builds, and allows them to be
+  manipulated, and then saves them on exit. Processed builds are stored per
+  branch/target as a list of integers.
+  """
+  def __init__(self, filename):
+    self.filename = filename
+    self.value = self._read()
+
+  def __enter__(self):
+    return self
+
+  def __exit__(self, exc_type, exc_value, traceback):
+    self._write(self.value)
+
+  def _read(self):
+    """Load from disk, and default to an empty store on error."""
+    try:
+      return json.loads(osutils.ReadFile(self.filename))
+    except (ValueError, IOError):
+      # If there was no file, or it was corrupt json, return default.
+      return {}
+
+  def _write(self, new_value):
+    """Write the current store to disk."""
+    return osutils.WriteFile(self.filename,
+                             json.dumps(new_value, sort_keys=True))
+
+  def GetProcessedBuilds(self, branch, target):
+    """Get a list of builds for a branch/target.
+
+    Args:
+      branch: Name of branch as a string.
+      target: Name of target as a string.
+
+    Returns:
+      List of integers associated with the given branch/target.
+    """
+    self.value.setdefault(branch, {})
+    self.value[branch].setdefault(target, [])
+    return self.value[branch][target]
+
+  def PurgeOldBuilds(self, branch, target, retain_list):
+    """Removes uninteresting builds for a branch/target.
+
+    Any build ids not in the retain list are removed.
+
+    Args:
+      branch: Name of branch as a string.
+      target: Name of target as a string.
+      retain_list: List of build ids that are still relevant.
+    """
+    processed = set(self.GetProcessedBuilds(branch, target))
+    retained_processed = processed.intersection(retain_list)
+    self.value[branch][target] = list(retained_processed)
+
+  def AddProcessedBuild(self, branch, target, build_id):
+    """Adds build_id to list for a branch/target.
+
+    It's safe to add a build_id that is already present.
+
+    Args:
+      branch: Name of branch as a string.
+      target: Name of target as a string.
+      build_id: build_id to add, as an integer.
+    """
+    processed = set(self.GetProcessedBuilds(branch, target))
+    processed.add(build_id)
+    self.value[branch][target] = sorted(processed)
diff --git a/cbuildbot/builders/builders_unittest b/lib/launch_control/processed_builds_unittest
similarity index 100%
copy from cbuildbot/builders/builders_unittest
copy to lib/launch_control/processed_builds_unittest
diff --git a/lib/launch_control/processed_builds_unittest.py b/lib/launch_control/processed_builds_unittest.py
new file mode 100644
index 0000000..91443b6
--- /dev/null
+++ b/lib/launch_control/processed_builds_unittest.py
@@ -0,0 +1,84 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Test processed_builds."""
+
+from __future__ import print_function
+
+import os
+
+from chromite.lib.launch_control import processed_builds
+from chromite.lib import cros_test_lib
+
+
+# Unit tests often need access to internals of the thing they test.
+# pylint: disable=protected-access
+
+class ProcessedBuildsStorageTest(cros_test_lib.TempDirTestCase):
+  """Test our helper library for storing processed build ids."""
+
+  def setUp(self):
+    self.testfile = os.path.join(self.tempdir, 'testfile.json')
+
+  def testStartStop(self):
+    with processed_builds.ProcessedBuildsStorage(self.testfile):
+      pass
+
+    self.assertFileContents(self.testfile, '{}')
+
+  def testFetchEmpty(self):
+    with processed_builds.ProcessedBuildsStorage(self.testfile) as ps:
+      self.assertEqual(ps.GetProcessedBuilds('branch', 'target'), [])
+
+    self.assertFileContents(self.testfile, '{"branch": {"target": []}}')
+
+  def testPurgeEmpty(self):
+    with processed_builds.ProcessedBuildsStorage(self.testfile) as ps:
+      ps.PurgeOldBuilds('branch', 'target', [1, 2, 3])
+
+    self.assertFileContents(self.testfile, '{"branch": {"target": []}}')
+
+  def testAddEmpty(self):
+    with processed_builds.ProcessedBuildsStorage(self.testfile) as ps:
+      ps.AddProcessedBuild('branch', 'target', 1)
+
+    self.assertFileContents(self.testfile, '{"branch": {"target": [1]}}')
+
+  def testMultipleUses(self):
+    with processed_builds.ProcessedBuildsStorage(self.testfile) as ps:
+      ps.AddProcessedBuild('branch', 'target', 1)
+      ps.AddProcessedBuild('branch', 'target', 2)
+
+    self.assertFileContents(self.testfile, '{"branch": {"target": [1, 2]}}')
+
+    with processed_builds.ProcessedBuildsStorage(self.testfile) as ps:
+      # Try adding twice, should only happen once.
+      ps.AddProcessedBuild('branch', 'target', 3)
+      ps.AddProcessedBuild('branch', 'target', 3)
+
+    self.assertFileContents(self.testfile, '{"branch": {"target": [1, 2, 3]}}')
+
+    with processed_builds.ProcessedBuildsStorage(self.testfile) as ps:
+      ps.PurgeOldBuilds('branch', 'target', [2, 3])
+      ps.AddProcessedBuild('branch', 'target', 4)
+
+    self.assertFileContents(self.testfile, '{"branch": {"target": [2, 3, 4]}}')
+
+    with processed_builds.ProcessedBuildsStorage(self.testfile) as ps:
+      self.assertEqual(ps.GetProcessedBuilds('branch', 'target'), [2, 3, 4])
+
+  def testAddMultipleBranchTargets(self):
+    with processed_builds.ProcessedBuildsStorage(self.testfile) as ps:
+      ps.AddProcessedBuild('branch1', 'target', 1)
+      ps.AddProcessedBuild('branch2', 'target', 1)
+      ps.AddProcessedBuild('branch2', 'target', 2)
+      ps.AddProcessedBuild('branch2', 'target2', 3)
+
+      self.assertEqual(ps.GetProcessedBuilds('branch2', 'target'),
+                       [1, 2])
+
+    self.assertFileContents(
+        self.testfile,
+        '{"branch1": {"target": [1]},'
+        ' "branch2": {"target": [1, 2], "target2": [3]}}')
diff --git a/lib/operation.py b/lib/operation.py
index 785278c..ca866a7 100644
--- a/lib/operation.py
+++ b/lib/operation.py
@@ -24,7 +24,6 @@
   # pylint: disable=import-error
   import queue as Queue
 import re
-import shutil
 import struct
 import sys
 import termios
@@ -33,7 +32,6 @@
 from chromite.lib import cros_logging as logging
 from chromite.lib import osutils
 from chromite.lib import parallel
-from chromite.lib import workspace_lib
 from chromite.lib.terminal import Color
 
 # Define filenames for captured stdout and stderr.
@@ -76,7 +74,6 @@
     self._stdout_path = None
     self._stderr_path = None
     self._progress_bar_displayed = False
-    self._workspace_path = workspace_lib.WorkspacePath()
     self._isatty = os.isatty(sys.stdout.fileno())
 
   def _GetTerminalSize(self, fd=pty.STDOUT_FILENO):
@@ -174,24 +171,12 @@
     try:
       with cros_build_lib.OutputCapturer(
           stdout_path=self._stdout_path, stderr_path=self._stderr_path,
-          quiet_fail=self._workspace_path is not None):
+          quiet_fail=False):
         func(*args, **kwargs)
     finally:
       self._queue.put(_BackgroundTaskComplete())
       logging.getLogger().setLevel(restore_log_level)
 
-  def MoveStdoutStderrFiles(self):
-    """On failure, move stdout/stderr files to workspace/WORKSPACE_LOGS_DIR."""
-    path = os.path.join(self._workspace_path, workspace_lib.WORKSPACE_LOGS_DIR)
-    # TODO(ralphnathan): Not sure if we need this because it should be done when
-    # we store the log file for brillo commands.
-    osutils.SafeMakedirs(path)
-    osutils.SafeUnlink(os.path.join(path, STDOUT_FILE))
-    shutil.move(self._stdout_path, path)
-    osutils.SafeUnlink(os.path.join(path, STDERR_FILE))
-    shutil.move(self._stderr_path, path)
-    logging.warning('Please look at %s for more information.', path)
-
   # TODO (ralphnathan): Store PID of spawned process.
   def Run(self, func, *args, **kwargs):
     """Run func, parse its output, and update the progress bar.
@@ -235,9 +220,6 @@
         # touching the progress bar.
         sys.stdout.write('\n')
         logging.error('Oops. Something went wrong.')
-        # Move the stdout/stderr files to a location that the user can access.
-        if self._workspace_path is not None:
-          self.MoveStdoutStderrFiles()
         # Raise the exception so it can be caught again.
         raise
 
diff --git a/lib/operation_unittest.py b/lib/operation_unittest.py
index ea9e184..01e641d 100644
--- a/lib/operation_unittest.py
+++ b/lib/operation_unittest.py
@@ -13,9 +13,7 @@
 from chromite.lib import cros_logging as logging
 from chromite.lib import cros_test_lib
 from chromite.lib import operation
-from chromite.lib import osutils
 from chromite.lib import parallel
-from chromite.lib import workspace_lib
 
 
 class TestWrapperProgressBarOperation(operation.ProgressBarOperation):
@@ -40,7 +38,7 @@
   """Fake exception used for testing exception handling."""
 
 
-class ProgressBarOperationTest(cros_test_lib.WorkspaceTestCase,
+class ProgressBarOperationTest(cros_test_lib.MockTestCase,
                                cros_test_lib.OutputTestCase,
                                cros_test_lib.LoggingTestCase):
   """Test the Progress Bar Operation class."""
@@ -53,16 +51,6 @@
         return_value=operation._TerminalSize(100, terminal_width))
     self.PatchObject(os, 'isatty', return_value=True)
 
-  def _GetStdoutPath(self):
-    """Return path to the file where stdout is captured."""
-    return os.path.join(self.workspace_path, workspace_lib.WORKSPACE_LOGS_DIR,
-                        operation.STDOUT_FILE)
-
-  def _GetStderrPath(self):
-    """Return path to the file where stderr is captured."""
-    return os.path.join(self.workspace_path, workspace_lib.WORKSPACE_LOGS_DIR,
-                        operation.STDERR_FILE)
-
   def _VerifyProgressBar(self, width, percent, expected_shaded,
                          expected_unshaded):
     """Helper to test progress bar with different percentages and lengths."""
@@ -130,8 +118,8 @@
     #   called once.
     self.AssertOutputContainsLine('Calling ParseOutput')
 
-  def testExceptionHandlingNotInWorkspace(self):
-    """Test exception handling if not in a workspace."""
+  def testExceptionHandling(self):
+    """Test exception handling."""
     def func():
       print('foo')
       print('bar', file=sys.stderr)
@@ -152,64 +140,6 @@
     self.AssertOutputContainsLine('foo')
     self.AssertOutputContainsLine('bar', check_stderr=True)
 
-  def testExceptionHandlingInWorkspace(self):
-    """Test that stdout/stderr files are moved correctly if in a workspace."""
-    def func():
-      print('foo')
-      print('bar', file=sys.stderr)
-      raise FakeException()
-
-    self.CreateWorkspace()
-    op = TestWrapperProgressBarOperation()
-    stdout_file = self._GetStdoutPath()
-    stderr_file = self._GetStderrPath()
-
-    # Check that the files don't exist before the operation is called.
-    self.assertNotExists(stdout_file)
-    self.assertNotExists(stderr_file)
-
-    try:
-      with cros_test_lib.LoggingCapturer() as logs:
-        op.Run(func)
-    except parallel.BackgroundFailure as e:
-      if not e.HasFailureType(FakeException):
-        raise e
-
-    # Check that the files have been moved to the right location.
-    self.assertExists(stdout_file)
-    self.assertExists(stderr_file)
-
-    # Check that the log message contains the path.
-    self.AssertLogsContain(logs, self.workspace_path)
-
-  def testExceptionHandlingInWorkspaceFilesAlreadyExist(self):
-    """Test that old stdout/stderr files are removed from log directory."""
-    def func():
-      print('foo')
-      print('bar', file=sys.stderr)
-      raise FakeException()
-
-    self.CreateWorkspace()
-    op = TestWrapperProgressBarOperation()
-    stdout_file = self._GetStdoutPath()
-    stderr_file = self._GetStderrPath()
-    osutils.Touch(stdout_file, makedirs=True)
-    osutils.Touch(stderr_file, makedirs=True)
-
-    # Assert that the files are empty.
-    self.assertEqual(osutils.ReadFile(stdout_file), '')
-    self.assertEqual(osutils.ReadFile(stderr_file), '')
-
-    try:
-      op.Run(func)
-    except parallel.BackgroundFailure as e:
-      if not e.HasFailureType(FakeException):
-        raise e
-
-    # Check that the files contain the right information.
-    self.assertIn('foo', osutils.ReadFile(stdout_file))
-    self.assertIn('bar', osutils.ReadFile(stderr_file))
-
   def testLogLevel(self):
     """Test that the log level of the function running is set correctly."""
     func_log_level = logging.DEBUG
diff --git a/lib/patch.py b/lib/patch.py
index 8271df6..09f7132 100644
--- a/lib/patch.py
+++ b/lib/patch.py
@@ -54,6 +54,7 @@
 ATTR_FAIL_COUNT = 'fail_count'
 ATTR_PASS_COUNT = 'pass_count'
 ATTR_TOTAL_FAIL_COUNT = 'total_fail_count'
+ATTR_COMMIT_MESSAGE = 'commit_message'
 
 ALL_ATTRS = (
     ATTR_REMOTE,
@@ -69,6 +70,7 @@
     ATTR_FAIL_COUNT,
     ATTR_PASS_COUNT,
     ATTR_TOTAL_FAIL_COUNT,
+    ATTR_COMMIT_MESSAGE,
 )
 
 def ParseSHA1(text, error_ok=True):
@@ -1499,7 +1501,8 @@
 
   def __init__(self, project_url, project, ref, tracking_branch, remote,
                sha1, change_id, gerrit_number, patch_number, owner_email=None,
-               fail_count=0, pass_count=0, total_fail_count=0):
+               fail_count=0, pass_count=0, total_fail_count=0,
+               commit_message=None):
     """Initializes a GerritFetchOnlyPatch object."""
     super(GerritFetchOnlyPatch, self).__init__(
         project_url, project, ref, tracking_branch, remote,
@@ -1520,6 +1523,10 @@
     self.fail_count = fail_count
     self.pass_count = pass_count
     self.total_fail_count = total_fail_count
+    # commit_message is inherited from GitRepoPatch; only override it when the
+    # passed-in value is not None.
+    if commit_message:
+      self.commit_message = commit_message
 
   @classmethod
   def FromAttrDict(cls, attr_dict):
@@ -1541,7 +1548,10 @@
                                 fail_count=int(attr_dict[ATTR_FAIL_COUNT]),
                                 pass_count=int(attr_dict[ATTR_PASS_COUNT]),
                                 total_fail_count=int(
-                                    attr_dict[ATTR_TOTAL_FAIL_COUNT]))
+                                    attr_dict[ATTR_TOTAL_FAIL_COUNT]),
+                                commit_message=attr_dict.get(
+                                    ATTR_COMMIT_MESSAGE))
+
 
   def _EnsureId(self, commit_message):
     """Ensure we have a usable Change-Id
@@ -1596,6 +1606,7 @@
         ATTR_FAIL_COUNT: str(self.fail_count),
         ATTR_PASS_COUNT: str(self.pass_count),
         ATTR_TOTAL_FAIL_COUNT: str(self.total_fail_count),
+        ATTR_COMMIT_MESSAGE: self.commit_message,
     }
 
     return attr_dict
diff --git a/lib/patch_unittest.py b/lib/patch_unittest.py
index a3f2c4c..2a2bcd8 100644
--- a/lib/patch_unittest.py
+++ b/lib/patch_unittest.py
@@ -460,6 +460,92 @@
     self.assertIn('Change-Id: %s\n' % changeid, patch.commit_message)
 
 
+class TestGerritFetchOnlyPatch(cros_test_lib.MockTestCase):
+  """Test of GerritFetchOnlyPatch."""
+
+  def testFromAttrDict(self):
+    """Test whether FromAttrDict can handle the commit message."""
+    attr_dict_without_msg = {
+        cros_patch.ATTR_PROJECT_URL: 'https://host/chromite/tacos',
+        cros_patch.ATTR_PROJECT: 'chromite/tacos',
+        cros_patch.ATTR_REF: 'refs/changes/11/12345/4',
+        cros_patch.ATTR_BRANCH: 'master',
+        cros_patch.ATTR_REMOTE: 'cros-internal',
+        cros_patch.ATTR_COMMIT: '7181e4b5e182b6f7d68461b04253de095bad74f9',
+        cros_patch.ATTR_CHANGE_ID: 'I47ea30385af60ae4cc2acc5d1a283a46423bc6e1',
+        cros_patch.ATTR_GERRIT_NUMBER: '12345',
+        cros_patch.ATTR_PATCH_NUMBER: '4',
+        cros_patch.ATTR_OWNER_EMAIL: 'foo@chromium.org',
+        cros_patch.ATTR_FAIL_COUNT: 1,
+        cros_patch.ATTR_PASS_COUNT: 1,
+        cros_patch.ATTR_TOTAL_FAIL_COUNT: 3}
+
+    attr_dict_with_msg = {
+        cros_patch.ATTR_PROJECT_URL: 'https://host/chromite/tacos',
+        cros_patch.ATTR_PROJECT: 'chromite/tacos',
+        cros_patch.ATTR_REF: 'refs/changes/11/12345/4',
+        cros_patch.ATTR_BRANCH: 'master',
+        cros_patch.ATTR_REMOTE: 'cros-internal',
+        cros_patch.ATTR_COMMIT: '7181e4b5e182b6f7d68461b04253de095bad74f9',
+        cros_patch.ATTR_CHANGE_ID: 'I47ea30385af60ae4cc2acc5d1a283a46423bc6e1',
+        cros_patch.ATTR_GERRIT_NUMBER: '12345',
+        cros_patch.ATTR_PATCH_NUMBER: '4',
+        cros_patch.ATTR_OWNER_EMAIL: 'foo@chromium.org',
+        cros_patch.ATTR_FAIL_COUNT: 1,
+        cros_patch.ATTR_PASS_COUNT: 1,
+        cros_patch.ATTR_TOTAL_FAIL_COUNT: 3,
+        cros_patch.ATTR_COMMIT_MESSAGE: 'commit message'}
+
+    self.PatchObject(cros_patch.GitRepoPatch, '_AddFooters',
+                     return_value='commit message')
+
+    result_1 = (cros_patch.GerritFetchOnlyPatch.
+                FromAttrDict(attr_dict_without_msg).commit_message)
+    result_2 = (cros_patch.GerritFetchOnlyPatch.
+                FromAttrDict(attr_dict_with_msg).commit_message)
+    self.assertEqual(None, result_1)
+    self.assertEqual('commit message', result_2)
+
+  def testGetAttributeDict(self):
+    """Test whether GetAttributeDict can get the commit message properly."""
+    change = cros_patch.GerritFetchOnlyPatch(
+        'https://host/chromite/tacos',
+        'chromite/tacos',
+        'refs/changes/11/12345/4',
+        'master',
+        'cros-internal',
+        '7181e4b5e182b6f7d68461b04253de095bad74f9',
+        'I47ea30385af60ae4cc2acc5d1a283a46423bc6e1',
+        '12345',
+        '4',
+        'foo@chromium.org',
+        1,
+        1,
+        3)
+
+    expected = {
+        cros_patch.ATTR_PROJECT_URL: 'https://host/chromite/tacos',
+        cros_patch.ATTR_PROJECT: 'chromite/tacos',
+        cros_patch.ATTR_REF: 'refs/changes/11/12345/4',
+        cros_patch.ATTR_BRANCH: 'master',
+        cros_patch.ATTR_REMOTE: 'cros-internal',
+        cros_patch.ATTR_COMMIT: '7181e4b5e182b6f7d68461b04253de095bad74f9',
+        cros_patch.ATTR_CHANGE_ID: 'I47ea30385af60ae4cc2acc5d1a283a46423bc6e1',
+        cros_patch.ATTR_GERRIT_NUMBER: '12345',
+        cros_patch.ATTR_PATCH_NUMBER: '4',
+        cros_patch.ATTR_OWNER_EMAIL: 'foo@chromium.org',
+        cros_patch.ATTR_FAIL_COUNT: '1',
+        cros_patch.ATTR_PASS_COUNT: '1',
+        cros_patch.ATTR_TOTAL_FAIL_COUNT: '3',
+        cros_patch.ATTR_COMMIT_MESSAGE: None}
+    self.assertEqual(change.GetAttributeDict(), expected)
+
+    self.PatchObject(cros_patch.GitRepoPatch, '_AddFooters',
+                     return_value='commit message')
+    change.commit_message = 'commit message'
+    expected[cros_patch.ATTR_COMMIT_MESSAGE] = 'commit message'
+    self.assertEqual(change.GetAttributeDict(), expected)
+
 
 class TestGetOptionLinesFromCommitMessage(cros_test_lib.TestCase):
   """Tests of GetOptionFromCommitMessage."""
diff --git a/lib/path_util.py b/lib/path_util.py
index 0990238..bf4e0b6 100644
--- a/lib/path_util.py
+++ b/lib/path_util.py
@@ -11,7 +11,6 @@
 import tempfile
 
 from chromite.cbuildbot import constants
-from chromite.lib import bootstrap_lib
 from chromite.lib import cros_build_lib
 from chromite.lib import git
 from chromite.lib import osutils
@@ -23,7 +22,6 @@
 CHECKOUT_TYPE_UNKNOWN = 'unknown'
 CHECKOUT_TYPE_GCLIENT = 'gclient'
 CHECKOUT_TYPE_REPO = 'repo'
-CHECKOUT_TYPE_SDK_BOOTSTRAP = 'bootstrap'
 
 CheckoutInfo = collections.namedtuple(
     'CheckoutInfo', ['type', 'root', 'chrome_src_dir'])
@@ -234,28 +232,6 @@
     return self._ConvertPath(path, self._GetHostPath)
 
 
-def _IsSdkBootstrapCheckout(path):
-  """Return True if |path| is an SDK bootstrap.
-
-  A bootstrap is a lone git checkout of chromite. It cannot be managed by repo.
-  Underneath this bootstrap chromite, there are several SDK checkouts, each
-  managed by repo.
-  """
-  submodule_git = os.path.join(path, '.git')
-  if not git.IsSubmoduleCheckoutRoot(submodule_git, 'origin',
-                                     constants.CHROMITE_URL):
-    # Not a git checkout of chromite.
-    return False
-
-  # This could be an SDK under sdk_checkouts or the parent bootstrap.
-  # It'll be an SDK checkout if it has a parent ".repo".
-  if git.FindRepoDir(path):
-    # It is managed by repo, therefore it is a child SDK checkout.
-    return False
-
-  return True
-
-
 def DetermineCheckout(cwd):
   """Gather information on the checkout we are in.
 
@@ -263,11 +239,6 @@
   This function determines what checkout type |cwd| is in, for example, if |cwd|
   belongs to a `repo` checkout.
 
-  There is a special case when |cwd| is a child SDK checkout of a bootstrap
-  chromite (e.g. something under chromite/sdk_checkouts/xxx.yyy.zzz/). This
-  case should report that |cwd| belongs to a bootstrap checkout instead of the
-  `repo` checkout of the "xxx.yyy.zzz" child SDK.
-
   Returns:
     A CheckoutInfo object with these attributes:
       type: The type of checkout.  Valid values are CHECKOUT_TYPE_*.
@@ -278,24 +249,15 @@
   checkout_type = CHECKOUT_TYPE_UNKNOWN
   root, path = None, None
 
-  # Check for SDK bootstrap first because it goes top to bottom.
-  # If we do it bottom to top, we'll hit chromite/sdk_checkouts/*/.repo first
-  # and will wrongly conclude that this is a repo checkout. So we go top down
-  # to visit chromite/ first.
-  for path in osutils.IteratePaths(cwd):
-    if _IsSdkBootstrapCheckout(path):
-      checkout_type = CHECKOUT_TYPE_SDK_BOOTSTRAP
+  for path in osutils.IteratePathParents(cwd):
+    gclient_file = os.path.join(path, '.gclient')
+    if os.path.exists(gclient_file):
+      checkout_type = CHECKOUT_TYPE_GCLIENT
       break
-  else:
-    for path in osutils.IteratePathParents(cwd):
-      gclient_file = os.path.join(path, '.gclient')
-      if os.path.exists(gclient_file):
-        checkout_type = CHECKOUT_TYPE_GCLIENT
-        break
-      repo_dir = os.path.join(path, '.repo')
-      if os.path.isdir(repo_dir):
-        checkout_type = CHECKOUT_TYPE_REPO
-        break
+    repo_dir = os.path.join(path, '.repo')
+    if os.path.isdir(repo_dir):
+      checkout_type = CHECKOUT_TYPE_REPO
+      break
 
   if checkout_type != CHECKOUT_TYPE_UNKNOWN:
     root = path
@@ -315,9 +277,6 @@
   path = None
   if checkout.type == CHECKOUT_TYPE_REPO:
     path = os.path.join(checkout.root, GENERAL_CACHE_DIR)
-  elif checkout.type == CHECKOUT_TYPE_SDK_BOOTSTRAP:
-    path = os.path.join(checkout.root, bootstrap_lib.SDK_CHECKOUTS,
-                        GENERAL_CACHE_DIR)
   elif checkout.type == CHECKOUT_TYPE_GCLIENT:
     path = os.path.join(checkout.root, CHROME_CACHE_DIR)
   elif checkout.type == CHECKOUT_TYPE_UNKNOWN:
diff --git a/lib/path_util_unittest.py b/lib/path_util_unittest.py
index 3fe93d1..4599978 100644
--- a/lib/path_util_unittest.py
+++ b/lib/path_util_unittest.py
@@ -12,7 +12,6 @@
 import tempfile
 
 from chromite.cbuildbot import constants
-from chromite.lib import bootstrap_lib
 from chromite.lib import cros_build_lib_unittest
 from chromite.lib import cros_test_lib
 from chromite.lib import git
@@ -111,28 +110,8 @@
     self.RunTest(['a/.git/'], 'a', None,
                  path_util.CHECKOUT_TYPE_UNKNOWN, None)
 
-  def testSdkBootstrap(self):
-    """Recognizes an SDK bootstrap case."""
-    self.rc_mock.AddCmdResult(
-        partial_mock.In('config'), output=constants.CHROMITE_URL)
-    dir_struct = [
-        'a/.git/',
-        'a/sdk_checkouts/1.0.0/.repo',
-        'a/sdk_checkouts/1.0.0/chromite/.git',
-    ]
-    self.RunTest(dir_struct, 'a', 'a',
-                 path_util.CHECKOUT_TYPE_SDK_BOOTSTRAP, None)
-    self.RunTest(dir_struct, 'a/b', 'a',
-                 path_util.CHECKOUT_TYPE_SDK_BOOTSTRAP, None)
-    self.RunTest(dir_struct, 'a/sdk_checkouts', 'a',
-                 path_util.CHECKOUT_TYPE_SDK_BOOTSTRAP, None)
-    self.RunTest(dir_struct, 'a/sdk_checkouts/1.0.0', 'a',
-                 path_util.CHECKOUT_TYPE_SDK_BOOTSTRAP, None)
-    self.RunTest(dir_struct, 'a/sdk_checkouts/1.0.0/chromite', 'a',
-                 path_util.CHECKOUT_TYPE_SDK_BOOTSTRAP, None)
 
-
-class FindCacheDirTest(cros_test_lib.WorkspaceTestCase):
+class FindCacheDirTest(cros_test_lib.MockTempDirTestCase):
   """Test cache dir specification and finding functionality."""
 
   def setUp(self):
@@ -145,10 +124,6 @@
     self.repo_root = os.path.join(self.tempdir, 'repo')
     self.gclient_root = os.path.join(self.tempdir, 'gclient')
     self.nocheckout_root = os.path.join(self.tempdir, 'nothing')
-    self.CreateBootstrap('1.0.0')
-    self.bootstrap_cache = os.path.join(
-        self.bootstrap_path, bootstrap_lib.SDK_CHECKOUTS,
-        path_util.GENERAL_CACHE_DIR)
 
     self.rc_mock = self.StartPatcher(cros_build_lib_unittest.RunCommandMock())
     self.cwd_mock = self.PatchObject(os, 'getcwd')
@@ -174,35 +149,6 @@
         path_util.FindCacheDir(),
         os.path.join(tempfile.gettempdir(), ''))
 
-  def testBootstrap(self):
-    """Test when running from bootstrap."""
-    self.cwd_mock.return_value = self.bootstrap_path
-    self.rc_mock.AddCmdResult(
-        partial_mock.In('config'), output=constants.CHROMITE_URL)
-    self.assertEquals(
-        path_util.FindCacheDir(),
-        self.bootstrap_cache)
-
-  def testSdkCheckoutsInsideBootstrap(self):
-    """Test when in the bootstrap SDK checkout location."""
-    self.cwd_mock.return_value = os.path.join(
-        self.bootstrap_path, bootstrap_lib.SDK_CHECKOUTS)
-    self.rc_mock.AddCmdResult(
-        partial_mock.In('config'), output=constants.CHROMITE_URL)
-    self.assertEquals(
-        path_util.FindCacheDir(),
-        self.bootstrap_cache)
-
-  def testSdkInsideBootstrap(self):
-    """Test when in an SDK checkout inside the bootstrap."""
-    self.cwd_mock.return_value = os.path.join(
-        self.bootstrap_path, bootstrap_lib.SDK_CHECKOUTS, '1.0.0', 'chromite')
-    self.rc_mock.AddCmdResult(
-        partial_mock.In('config'), output=constants.CHROMITE_URL)
-    self.assertEquals(
-        path_util.FindCacheDir(),
-        self.bootstrap_cache)
-
 
 class TestPathResolver(cros_test_lib.MockTestCase):
   """Tests of ChrootPathResolver class."""
diff --git a/lib/paygen/gslock_unittest.py b/lib/paygen/gslock_unittest.py
index 70a60a6..19347e6 100644
--- a/lib/paygen/gslock_unittest.py
+++ b/lib/paygen/gslock_unittest.py
@@ -7,8 +7,6 @@
 from __future__ import print_function
 
 import multiprocessing
-import os
-import socket
 
 from chromite.lib import cros_build_lib
 from chromite.lib import cros_test_lib
@@ -108,42 +106,33 @@
 class GSLockTest(cros_test_lib.MockTestCase):
   """This test suite covers the GSLock file."""
 
+  # For contention tests, how many parallel workers to spawn.  To really
+  # stress test, you can bump it up to 200, but 20 seems to provide good
+  # coverage w/out sucking up too many resources.
+  NUM_THREADS = 20
+
   @cros_test_lib.NetworkTest()
   def setUp(self):
     self.ctx = gs.GSContext()
 
-    # Use the unique id to make sure the tests can be run multiple places.
-    unique_id = '%s.%d' % (socket.gethostname(), os.getpid())
-
-    self.lock_uri = 'gs://chromeos-releases-test/test-%s-gslock' % unique_id
-    self.data_uri = 'gs://chromeos-releases-test/test-%s-data' % unique_id
-
-    # Clear out any flags left from previous failure
-    self.ctx.Remove(self.lock_uri, ignore_missing=True)
-    self.ctx.Remove(self.data_uri, ignore_missing=True)
-
-  @cros_test_lib.NetworkTest()
-  def tearDown(self):
-    self.assertFalse(self.ctx.Exists(self.lock_uri))
-    self.assertFalse(self.ctx.Exists(self.data_uri))
-
   @cros_test_lib.NetworkTest()
   def testLock(self):
     """Test getting a lock."""
     # Force a known host name.
     self.PatchObject(cros_build_lib, 'MachineDetails', return_value='TestHost')
 
-    lock = gslock.Lock(self.lock_uri)
+    with gs.TemporaryURL('gslock') as lock_uri:
+      lock = gslock.Lock(lock_uri)
 
-    self.assertFalse(self.ctx.Exists(self.lock_uri))
-    lock.Acquire()
-    self.assertTrue(self.ctx.Exists(self.lock_uri))
+      self.assertFalse(self.ctx.Exists(lock_uri))
+      lock.Acquire()
+      self.assertTrue(self.ctx.Exists(lock_uri))
 
-    contents = self.ctx.Cat(self.lock_uri)
-    self.assertEqual(contents, 'TestHost')
+      contents = self.ctx.Cat(lock_uri)
+      self.assertEqual(contents, 'TestHost')
 
-    lock.Release()
-    self.assertFalse(self.ctx.Exists(self.lock_uri))
+      lock.Release()
+      self.assertFalse(self.ctx.Exists(lock_uri))
 
   @cros_test_lib.NetworkTest()
   def testLockRepetition(self):
@@ -151,117 +140,120 @@
     # Force a known host name.
     self.PatchObject(cros_build_lib, 'MachineDetails', return_value='TestHost')
 
-    lock = gslock.Lock(self.lock_uri)
+    with gs.TemporaryURL('gslock') as lock_uri:
+      lock = gslock.Lock(lock_uri)
 
-    self.assertFalse(self.ctx.Exists(self.lock_uri))
-    lock.Acquire()
-    self.assertTrue(self.ctx.Exists(self.lock_uri))
+      self.assertFalse(self.ctx.Exists(lock_uri))
+      lock.Acquire()
+      self.assertTrue(self.ctx.Exists(lock_uri))
 
-    lock.Acquire()
-    self.assertTrue(self.ctx.Exists(self.lock_uri))
+      lock.Acquire()
+      self.assertTrue(self.ctx.Exists(lock_uri))
 
-    lock.Release()
-    self.assertFalse(self.ctx.Exists(self.lock_uri))
+      lock.Release()
+      self.assertFalse(self.ctx.Exists(lock_uri))
 
-    lock.Acquire()
-    self.assertTrue(self.ctx.Exists(self.lock_uri))
+      lock.Acquire()
+      self.assertTrue(self.ctx.Exists(lock_uri))
 
-    lock.Release()
-    self.assertFalse(self.ctx.Exists(self.lock_uri))
+      lock.Release()
+      self.assertFalse(self.ctx.Exists(lock_uri))
 
   @cros_test_lib.NetworkTest()
   def testLockConflict(self):
     """Test lock conflict."""
+    with gs.TemporaryURL('gslock') as lock_uri:
+      lock1 = gslock.Lock(lock_uri)
+      lock2 = gslock.Lock(lock_uri)
 
-    lock1 = gslock.Lock(self.lock_uri)
-    lock2 = gslock.Lock(self.lock_uri)
+      # Manually lock 1, and ensure lock2 can't lock.
+      lock1.Acquire()
+      self.assertRaises(gslock.LockNotAcquired, lock2.Acquire)
+      lock1.Release()
 
-    # Manually lock 1, and ensure lock2 can't lock.
-    lock1.Acquire()
-    self.assertRaises(gslock.LockNotAcquired, lock2.Acquire)
-    lock1.Release()
+      # Use a with clause on 2, and ensure 1 can't lock.
+      with lock2:
+        self.assertRaises(gslock.LockNotAcquired, lock1.Acquire)
 
-    # Use a with clause on 2, and ensure 1 can't lock.
-    with lock2:
-      self.assertRaises(gslock.LockNotAcquired, lock1.Acquire)
+      # Ensure we can renew a given lock.
+      lock1.Acquire()
+      lock1.Renew()
+      lock1.Release()
 
-    # Ensure we can renew a given lock.
-    lock1.Acquire()
-    lock1.Renew()
-    lock1.Release()
-
-    # Ensure we get an error renewing a lock we don't hold.
-    self.assertRaises(gslock.LockNotAcquired, lock1.Renew)
+      # Ensure we get an error renewing a lock we don't hold.
+      self.assertRaises(gslock.LockNotAcquired, lock1.Renew)
 
   @cros_test_lib.NetworkTest()
   def testLockTimeout(self):
     """Test getting a lock when an old timed out one is present."""
+    with gs.TemporaryURL('gslock') as lock_uri:
+      # Both locks are always timed out.
+      lock1 = gslock.Lock(lock_uri, lock_timeout_mins=-1)
+      lock2 = gslock.Lock(lock_uri, lock_timeout_mins=-1)
 
-    # Both locks are always timed out.
-    lock1 = gslock.Lock(self.lock_uri, lock_timeout_mins=-1)
-    lock2 = gslock.Lock(self.lock_uri, lock_timeout_mins=-1)
-
-    lock1.Acquire()
-    lock2.Acquire()
-
-    self.ctx.Remove(self.lock_uri)
+      lock1.Acquire()
+      lock2.Acquire()
 
   @cros_test_lib.NetworkTest()
   def testRaceToAcquire(self):
     """Have lots of processes race to acquire the same lock."""
-    count = 20
+    count = self.NUM_THREADS
     pool = multiprocessing.Pool(processes=count)
-    results = pool.map(_InProcessAcquire, [self.lock_uri] * count)
+    with gs.TemporaryURL('gslock') as lock_uri:
+      results = pool.map(_InProcessAcquire, [lock_uri] * count)
 
-    # Clean up the lock since the processes explicitly only acquire.
-    self.ctx.Remove(self.lock_uri)
+      # Clean up the lock since the processes explicitly only acquire.
+      self.ctx.Remove(lock_uri)
 
-    # Ensure that only one of them got the lock.
-    self.assertEqual(results.count(True), 1)
+      # Ensure that only one of them got the lock.
+      self.assertEqual(results.count(True), 1)
 
   @cros_test_lib.NetworkTest()
   def testRaceToDoubleAcquire(self):
     """Have lots of processes race to double acquire the same lock."""
-    count = 20
+    count = self.NUM_THREADS
     pool = multiprocessing.Pool(processes=count)
-    results = pool.map(_InProcessDoubleAcquire, [self.lock_uri] * count)
+    with gs.TemporaryURL('gslock') as lock_uri:
+      results = pool.map(_InProcessDoubleAcquire, [lock_uri] * count)
 
-    # Clean up the lock sinc the processes explicitly only acquire.
-    self.ctx.Remove(self.lock_uri)
+      # Clean up the lock since the processes explicitly only acquire.
+      self.ctx.Remove(lock_uri)
 
-    # Ensure that only one of them got the lock (and got it twice).
-    self.assertEqual(results.count(0), count - 1)
-    self.assertEqual(results.count(2), 1)
+      # Ensure that only one of them got the lock (and got it twice).
+      self.assertEqual(results.count(0), count - 1)
+      self.assertEqual(results.count(2), 1)
 
   @cros_test_lib.NetworkTest()
   def testMultiProcessDataUpdate(self):
     """Have lots of processes update a GS file proctected by a lock."""
-    count = 20   # To really stress, bump up to 200.
+    count = self.NUM_THREADS
     pool = multiprocessing.Pool(processes=count)
-    results = pool.map(_InProcessDataUpdate,
-                       [(self.lock_uri, self.data_uri)] * count)
+    with gs.TemporaryURL('gslock') as lock_uri:
+      data_uri = lock_uri + '.data'
+      results = pool.map(_InProcessDataUpdate,
+                         [(lock_uri, data_uri)] * count)
 
-    self.assertEqual(self.ctx.Cat(self.data_uri), str(count))
+      self.assertEqual(self.ctx.Cat(data_uri), str(count))
 
-    # Ensure that all report success
-    self.assertEqual(results.count(True), count)
-
-    # Clean up the data file.
-    self.ctx.Remove(self.data_uri)
+      # Ensure that all report success
+      self.assertEqual(results.count(True), count)
 
   @cros_test_lib.NetworkTest()
   def testDryrunLock(self):
     """Ensure that lcok can be obtained and released in dry-run mode."""
-    lock = gslock.Lock(self.lock_uri, dry_run=True)
-    self.assertIsNone(lock.Acquire())
-    self.assertFalse(self.ctx.Exists(self.lock_uri))
-    self.assertIsNone(lock.Release())
+    with gs.TemporaryURL('gslock') as lock_uri:
+      lock = gslock.Lock(lock_uri, dry_run=True)
+      self.assertIsNone(lock.Acquire())
+      self.assertFalse(self.ctx.Exists(lock_uri))
+      self.assertIsNone(lock.Release())
 
+  @cros_test_lib.NetworkTest()
   def testDryrunLockRepetition(self):
     """Test aquiring same lock multiple times in dry-run mode."""
-    lock = gslock.Lock(self.lock_uri, dry_run=True)
-    self.assertIsNone(lock.Acquire())
-    self.assertIsNone(lock.Acquire())
-    self.assertIsNone(lock.Release())
-    self.assertIsNone(lock.Acquire())
-    self.assertIsNone(lock.Release())
+    with gs.TemporaryURL('gslock') as lock_uri:
+      lock = gslock.Lock(lock_uri, dry_run=True)
+      self.assertIsNone(lock.Acquire())
+      self.assertIsNone(lock.Acquire())
+      self.assertIsNone(lock.Release())
+      self.assertIsNone(lock.Acquire())
+      self.assertIsNone(lock.Release())
diff --git a/lib/paygen/gspaths.py b/lib/paygen/gspaths.py
index efcc6d7..87d6232 100644
--- a/lib/paygen/gspaths.py
+++ b/lib/paygen/gspaths.py
@@ -138,9 +138,19 @@
     src_image: A representation of image it updates from. None for
                Full updates, or the same type as tgt_image otherwise.
     uri: The URI of the payload. This can be any format understood by urilib.
+    labels: A list of strings. Labels are used to catalogue payloads.
+    skip: A boolean. If true, we skip generating this payload.
+    exists: A boolean. If true, artifacts for this build already exist.
   """
   _name = 'Payload definition'
-  _slots = ('tgt_image', 'src_image', 'uri')
+  _slots = ('tgt_image', 'src_image', 'uri', 'labels', 'skip', 'exists')
+
+  def __init__(self, labels=None, skip=False, exists=False, *args, **kwargs):
+    kwargs.update(labels=labels, skip=skip, exists=exists)
+    super(Payload, self).__init__(*args, **kwargs)
+
+    if self['labels'] is None:
+      self['labels'] = []
 
   def __str__(self):
     if self.uri:
diff --git a/lib/paygen/gspaths_unittest.py b/lib/paygen/gspaths_unittest.py
index bc506ec..4b581dc 100644
--- a/lib/paygen/gspaths_unittest.py
+++ b/lib/paygen/gspaths_unittest.py
@@ -514,6 +514,9 @@
                                        key=self.key),
             'src_image': None,
             'uri': full_uri,
+            'labels': [],
+            'skip': False,
+            'exists': False
         })
 
     self.assertDictEqual(
@@ -527,6 +530,9 @@
                                        version=self.version,
                                        key=self.key),
             'uri': delta_uri,
+            'labels': [],
+            'skip': False,
+            'exists': False
         })
 
     self.assertDictEqual(
@@ -541,6 +547,9 @@
                                        image_channel='image-channel'),
             'src_image': None,
             'uri': max_full_uri,
+            'labels': [],
+            'skip': False,
+            'exists': False
         })
 
     self.assertDictEqual(
@@ -558,6 +567,9 @@
                                        image_version=image_version,
                                        image_channel='image-channel'),
             'uri': max_delta_uri,
+            'labels': [],
+            'skip': False,
+            'exists': False
         })
 
 
diff --git a/lib/paygen/paygen_build_lib.py b/lib/paygen/paygen_build_lib.py
index d86add2..b9d383e 100644
--- a/lib/paygen/paygen_build_lib.py
+++ b/lib/paygen/paygen_build_lib.py
@@ -13,7 +13,6 @@
 
 from __future__ import print_function
 
-import ConfigParser
 import json
 import operator
 import os
@@ -51,17 +50,12 @@
 # will fail. We quietly ignore the failure, but leave bombs around that will
 # explode if people try to really use this library.
 try:
-  from crostools.config import config
-  from crostools.omaha import query
-
   # pylint: disable=F0401
   from site_utils.autoupdate.lib import test_params
   from site_utils.autoupdate.lib import test_control
   # pylint: enable=F0401
 
 except ImportError:
-  config = None
-  query = None
   test_params = None
   test_control = None
 
@@ -72,8 +66,10 @@
 # Used to format timestamps on archived paygen.log file names in GS.
 PAYGEN_LOG_TIMESTAMP_FORMAT = '%Y%m%d-%H%M%S-UTC'
 
-# Used to lookup all FSIs for all boards.
+# Board and device information published by goldeneye.
+BOARDS_URI = 'gs://chromeos-build-release-console/boards.json'
 FSI_URI = 'gs://chromeos-build-release-console/fsis.json'
+OMAHA_URI = 'gs://chromeos-build-release-console/omaha_status.json'
 
 
 class Error(Exception):
@@ -118,7 +114,7 @@
 
 
 class BoardNotConfigured(EarlyExit):
-  """The board does not exist in the crostools release config."""
+  """The board does not exist in the published goldeneye records."""
   RESULT = 26
 
 
@@ -278,6 +274,58 @@
         dry_run=dry_run)
 
 
+class PayloadManager(object):
+  """Helper class for classifying discovered payloads."""
+
+  def __init__(self):
+    self.payloads = []
+
+  def Add(self, labels, payloads, skip=False, exists=False):
+    for p in payloads:
+      self.payloads.append(gspaths.Payload(tgt_image=p.tgt_image,
+                                           src_image=p.src_image,
+                                           uri=p.uri, labels=labels,
+                                           skip=skip, exists=exists))
+
+  def Get(self, labels):
+    """Retrieve all payloads that have label sets that contain |labels|.
+
+    Args:
+      labels: A list of strings.
+
+    Returns:
+      A list of gspath.Payload objects that define |labels|.
+
+    Raises:
+      ValueError if |labels| is not a list.
+    """
+    if not isinstance(labels, list):
+      raise ValueError('PayloadManager.Get expects a list of labels.'
+                       ' Given %s' % type(labels))
+    labels = set(labels)
+    return [p for p in self.payloads
+            if set(p['labels']).issuperset(labels)]
+
+  def GetOnly(self, labels):
+    """Retrieve all payloads with label sets that are equal to |labels|.
+
+    Args:
+      labels: A list of strings.
+
+    Returns:
+      A list of gspath.Payload objects with label sets equal to |labels|.
+
+    Raises:
+      ValueError if |labels| is not a list.
+    """
+    if not isinstance(labels, list):
+      raise ValueError('PayloadManager.GetOnly expects a list of labels.'
+                       ' Given %s' % type(labels))
+
+    labels = set(labels)
+    return [p for p in self.payloads if set(p['labels']) == labels]
+
+
 class _PaygenBuild(object):
   """This class is responsible for generating the payloads for a given build.
 
@@ -366,7 +414,6 @@
     self._skip_nontest_payloads = skip_nontest_payloads
     self._control_dir = control_dir
     self._output_dir = output_dir
-    self._previous_version = None
     self._run_parallel = run_parallel
     self._run_on_builder = run_on_builder
     self._archive_board = None
@@ -374,6 +421,20 @@
     self._archive_build_uri = None
     self._au_generator_uri = au_generator_uri
 
+    # Cached goldeneye data.
+    self.cachedFsisJson = {}
+    self.cachedOmahaJson = {}
+
+  def _GetFsisJson(self):
+    if not self.cachedFsisJson:
+      self.cachedFsisJson = json.loads(gslib.Cat(FSI_URI))
+    return self.cachedFsisJson
+
+  def _GetOmahaJson(self):
+    if not self.cachedOmahaJson:
+      self.cachedOmahaJson = json.loads(gslib.Cat(OMAHA_URI))
+    return self.cachedOmahaJson
+
   def _GetFlagURI(self, flag):
     """Find the URI of the lock file associated with this build.
 
@@ -555,7 +616,7 @@
 
     return images
 
-  def _DiscoverActiveFsiBuilds(self):
+  def _DiscoverFsiBuildsForDeltas(self):
     """Read fsi_images in release.conf.
 
     fsi_images is a list of chromeos versions. We assume each one is
@@ -570,31 +631,61 @@
       List of gspaths.Build instances for each build so discovered. The list
       may be empty.
     """
-    # TODO(dgarrett): Switch to JSON mechanism in _DiscoverAllFsiBuilds
-    #   after it's in production, and after we clear the change with the TPMs.
-    #   At that time, check and ignore FSIs without the is_delta_supported flag.
-    # TODO(pprabhu): Can't switch to _DiscoverAllFsiBuilds till the HACK there
-    #   is removed.
+    results = []
 
     # FSI versions are only defined for the stable-channel.
     if self._build.channel != 'stable-channel':
-      return []
+      return results
 
-    try:
-      fsi_versions = config.GetListValue(self._build.board, 'fsi_images')
-    except ConfigParser.NoOptionError:
-      # fsi_images is an optional field.
-      return []
+    contents = self._GetFsisJson()
 
-    results = []
-    for version in fsi_versions:
-      results.append(gspaths.Build(version=version,
-                                   board=self._build.board,
-                                   channel=self._build.channel,
-                                   bucket=self._build.bucket))
+    for fsi in contents.get('fsis', []):
+      fsi_active = fsi['board']['is_active']
+      fsi_board = fsi['board']['public_codename']
+      fsi_version = fsi['chrome_os_version']
+      fsi_support_delta = fsi['is_delta_supported']
+
+      if fsi_active and fsi_support_delta and fsi_board == self._build.board:
+        results.append(gspaths.Build(version=fsi_version,
+                                     board=fsi_board,
+                                     channel=self._build.channel,
+                                     bucket=self._build.bucket))
+
     return results
 
-  def _DiscoverAllFsiBuilds(self):
+  def _DiscoverAllFsiBuildsForDeltaTesting(self):
+    """Pull FSI list from Golden Eye.
+
+    Finds all FSI builds that are known to support deltas and to be
+    lab stable for testing purposes.
+
+    Returns:
+      A list of gspaths.Build instances for each build so discovered. The list
+      may be empty.
+    """
+    results = []
+
+    contents = self._GetFsisJson()
+
+    for fsi in contents.get('fsis', []):
+      fsi_active = fsi['board']['is_active']
+      fsi_board = fsi['board']['public_codename']
+      fsi_version = fsi['chrome_os_version']
+      fsi_support_delta = fsi['is_delta_supported']
+      fsi_lab_stable = fsi['is_lab_stable']
+
+      conditions = [fsi_board == self._build.board, fsi_active,
+                    fsi_support_delta, fsi_lab_stable]
+
+      if all(conditions):
+        results.append(gspaths.Build(version=fsi_version,
+                                     board=fsi_board,
+                                     channel=self._build.channel,
+                                     bucket=self._build.bucket))
+
+    return results
+
+  def _DiscoverAllFsiBuildsForFullTesting(self):
     """Pull FSI list from Golden Eye.
 
     Returns a list of chromeos versions. We assume each one is
@@ -608,20 +699,15 @@
       may be empty.
     """
     results = []
-    # XXX:HACK -- FSI builds for this board is known to brick the DUTs in the
-    # lab. As a workaround, we're dropping test coverage for this board
-    # temporarily (crbug.com/460174).
-    # TODO(pprabhu) Remove hack once we have a real solution (crbug.com/462320).
-    if self._build.board == 'peach-pit':
-      return results
 
-    contents = json.loads(gslib.Cat(FSI_URI))
+    contents = self._GetFsisJson()
 
     for fsi in contents.get('fsis', []):
       fsi_board = fsi['board']['public_codename']
       fsi_version = fsi['chrome_os_version']
+      fsi_lab_stable = fsi['is_lab_stable']
 
-      if fsi_board == self._build.board:
+      if fsi_lab_stable and fsi_board == self._build.board:
         results.append(fsi_version)
 
     return results
@@ -638,16 +724,25 @@
       know about the currently published version, this always contain zero or
       one entries.
     """
-    self._previous_version = query.FindLatestPublished(self._build.channel,
-                                                       self._build.board)
+    results = []
 
-    if self._previous_version:
-      return [gspaths.Build(gspaths.Build(version=self._previous_version,
-                                          board=self._build.board,
-                                          channel=self._build.channel,
-                                          bucket=self._build.bucket))]
+    # Paygen channel names typically end in '-channel', while Goldeneye
+    # does not maintain the '-channel' ending.
+    channel_name = self._build.channel.replace('-channel', '')
 
-    return []
+    contents = self._GetOmahaJson()
+    for nmo in contents.get('omaha_data', []):
+      nmo_board = nmo['board']['public_codename']
+      nmo_channel = nmo['channel']
+      nmo_version = nmo['chrome_os_version']
+
+      if nmo_board == self._build.board and nmo_channel == channel_name:
+        results.append(gspaths.Build(version=nmo_version,
+                                     board=self._build.board,
+                                     channel=self._build.channel,
+                                     bucket=self._build.bucket))
+
+    return results
 
   def _DiscoverRequiredFullPayloads(self, images):
     """Find the Payload objects for the images from the current build.
@@ -765,9 +860,7 @@
     and may be otherwise detrimental to the release schedule.
 
     Returns:
-      A list of tuples of the form (payload, skip), where payload is an
-      instance of gspath.Payload and skip is a Boolean that says whether it
-      should be skipped (i.e. not generated).
+      A PayloadManager instance.
 
     Raises:
       BuildNotReady: If the current build doesn't seem to have all of it's
@@ -776,12 +869,14 @@
       BuildCorrupt: If current or previous builds have unexpected images.
       ImageMissing: Raised if expected images are missing for previous builds.
     """
-    # Initiate a list that will contain lists of payload subsets, along with a
-    # Boolean stating whether or not we need to skip generating them.
-    payload_sublists_skip = []
+    images = []
+    previous_images = []
+    fsi_images = []
+
+    payload_manager = PayloadManager()
 
     try:
-      # When discovering the images for our current build, they might
+      # When discovering the images for our current build, they might not be
       # discoverable right away (GS eventual consistency). So, we retry.
       images = retry_util.RetryException(ImageMissing, 3,
                                          self._DiscoverImages, self._build,
@@ -795,110 +890,147 @@
 
     _LogList('Images found', images)
 
-    # Discover active FSI builds we need deltas from.
-    fsi_builds = self._DiscoverActiveFsiBuilds()
+    # Discover and filter active FSI builds.
+    fsi_builds = self._DiscoverFsiBuildsForDeltas()
     if fsi_builds:
       _LogList('Active FSI builds considered', fsi_builds)
     else:
       logging.info('No active FSI builds found')
 
-    # Discover other previous builds we need deltas from.
+    for fsi in fsi_builds:
+      fsi_images += self._DiscoverImages(fsi)
+      fsi_images += self._DiscoverTestImageArchives(fsi)
+
+    fsi_images = _FilterForBasic(fsi_images) + _FilterForTest(fsi_images)
+
+    # Discover previous, non-FSI, builds that we also must generate deltas for.
     previous_builds = [b for b in self._DiscoverNmoBuild()
                        if b not in fsi_builds]
     if previous_builds:
-      _LogList('Other previous builds considered', previous_builds)
+      _LogList('Previous, non-FSI, builds considered', previous_builds)
     else:
       logging.info('No other previous builds found')
 
-    # Discover the images from those previous builds, and put them into
-    # a single list. Raises ImageMissing if no images are found.
-    previous_images = []
-    for b in previous_builds:
+    # Discover and filter previous images.
+    for p in previous_builds:
       try:
-        previous_images += self._DiscoverImages(b)
+        previous_images += self._DiscoverImages(p)
       except ImageMissing as e:
         # Temporarily allow generation of delta payloads to fail because of
         # a missing previous build until crbug.com/243916 is addressed.
         # TODO(mtennant): Remove this when bug is fixed properly.
         logging.warning('Previous build image is missing, skipping: %s', e)
 
-        # We also clear the previous version field so that subsequent code does
-        # not attempt to generate a full update test from the N-1 version;
-        # since this version has missing images, no payloads were generated for
-        # it and test generation is bound to fail.
-        # TODO(garnold) This should be reversed together with the rest of this
-        # block.
-        self._previous_version = None
-
         # In this case, we should also skip test image discovery; since no
         # signed deltas will be generated from this build, we don't need to
         # generate test deltas from it.
         continue
+      previous_images += self._DiscoverTestImageArchives(p)
 
-      previous_images += self._DiscoverTestImageArchives(b)
+    previous_images = (
+        _FilterForBasic(previous_images) + _FilterForTest(previous_images))
 
-    for b in fsi_builds:
-      previous_images += self._DiscoverImages(b)
-      previous_images += self._DiscoverTestImageArchives(b)
+    # Discover and catalogue full, non-test payloads.
+    skip_full = self._skip_full_payloads or self._skip_nontest_payloads
 
-    # Only consider base (signed) and test previous images.
-    filtered_previous_images = _FilterForBasic(previous_images)
-    filtered_previous_images += _FilterForTest(previous_images)
-    previous_images = filtered_previous_images
+    # Full payloads for the current build.
+    payload_manager.Add(
+        ['full'],
+        self._DiscoverRequiredFullPayloads(_FilterForImages(images)),
+        skip=skip_full)
 
-    # Generate full payloads for all non-test images in the current build.
-    # Include base, NPO, premp, and mp (if present).
-    payload_sublists_skip.append(
-        (self._skip_full_payloads or self._skip_nontest_payloads,
-         self._DiscoverRequiredFullPayloads(_FilterForImages(images))))
+    # Full payloads for previous builds.
+    payload_manager.Add(
+        ['full', 'previous'],
+        self._DiscoverRequiredFullPayloads(_FilterForImages(previous_images)),
+        skip=skip_full)
+
+    # Discover delta payloads.
+    skip_deltas = self._skip_delta_payloads or self._skip_nontest_payloads
 
     # Deltas for current -> NPO (pre-MP and MP).
-    payload_sublists_skip.append(
-        (self._skip_delta_payloads or self._skip_nontest_payloads,
-         self._DiscoverRequiredNpoDeltas(_FilterForPremp(images))))
-    payload_sublists_skip.append(
-        (self._skip_delta_payloads or self._skip_nontest_payloads,
-         self._DiscoverRequiredNpoDeltas(_FilterForMp(images))))
+    delta_npo_labels = ['delta', 'npo']
+    payload_manager.Add(
+        delta_npo_labels,
+        self._DiscoverRequiredNpoDeltas(_FilterForPremp(images)),
+        skip=skip_deltas)
+    payload_manager.Add(
+        delta_npo_labels,
+        self._DiscoverRequiredNpoDeltas(_FilterForMp(images)),
+        skip=skip_deltas)
 
     # Deltas for previous -> current (pre-MP and MP).
-    payload_sublists_skip.append(
-        (self._skip_delta_payloads or self._skip_nontest_payloads,
-         self._DiscoverRequiredFromPreviousDeltas(
-             _FilterForPremp(_FilterForBasic(images)),
-             _FilterForPremp(previous_images))))
-    payload_sublists_skip.append(
-        (self._skip_delta_payloads or self._skip_nontest_payloads,
-         self._DiscoverRequiredFromPreviousDeltas(
-             _FilterForMp(_FilterForBasic(images)),
-             _FilterForMp(previous_images))))
+    delta_previous_labels = ['delta', 'previous']
+    payload_manager.Add(
+        delta_previous_labels,
+        self._DiscoverRequiredFromPreviousDeltas(
+            _FilterForPremp(_FilterForBasic(images)),
+            _FilterForPremp(previous_images)),
+        skip=skip_deltas)
+    payload_manager.Add(
+        delta_previous_labels,
+        self._DiscoverRequiredFromPreviousDeltas(
+            _FilterForMp(_FilterForBasic(images)),
+            _FilterForMp(previous_images)),
+        skip=skip_deltas)
 
-    # Only discover test payloads if Autotest is not disabled.
+    # Deltas for fsi -> current (pre-MP and MP).
+    delta_fsi_labels = ['delta', 'fsi']
+    payload_manager.Add(
+        delta_fsi_labels,
+        self._DiscoverRequiredFromPreviousDeltas(
+            _FilterForPremp(_FilterForBasic(images)),
+            _FilterForPremp(fsi_images)),
+        skip=skip_deltas)
+    payload_manager.Add(
+        delta_fsi_labels,
+        self._DiscoverRequiredFromPreviousDeltas(
+            _FilterForMp(_FilterForBasic(images)),
+            _FilterForMp(fsi_images)),
+        skip=skip_deltas)
+
+    # Discover test payloads if Autotest is not disabled.
     if self._control_dir:
-      # Full test payloads.
-      payload_sublists_skip.append(
-          (self._skip_full_payloads or self._skip_test_payloads,
-           self._DiscoverRequiredFullPayloads(_FilterForTest(images))))
+      skip_test_full = self._skip_full_payloads or self._skip_test_payloads
+      skip_test_deltas = self._skip_delta_payloads or self._skip_test_payloads
 
-      # Delta for current -> NPO (test payloads).
-      payload_sublists_skip.append(
-          (self._skip_delta_payloads or self._skip_test_payloads,
-           self._DiscoverRequiredTestNpoDeltas(_FilterForTest(images))))
+      # Full test payloads.
+      payload_manager.Add(
+          ['test', 'full'],
+          self._DiscoverRequiredFullPayloads(_FilterForTest(images)),
+          skip=skip_test_full)
+
+      # Full previous payloads.
+      payload_manager.Add(
+          ['test', 'full', 'previous'],
+          self._DiscoverRequiredFullPayloads(_FilterForTest(previous_images)),
+          skip=skip_test_full)
+
+      # Deltas for current -> NPO (test payloads).
+      payload_manager.Add(
+          ['test', 'delta', 'npo'],
+          self._DiscoverRequiredTestNpoDeltas(_FilterForTest(images)),
+          skip=skip_test_deltas)
 
       # Deltas for previous -> current (test payloads).
-      payload_sublists_skip.append(
-          (self._skip_delta_payloads or self._skip_test_payloads,
-           self._DiscoverRequiredFromPreviousDeltas(
-               _FilterForTest(images), _FilterForTest(previous_images))))
+      payload_manager.Add(
+          ['test', 'delta', 'previous'],
+          self._DiscoverRequiredFromPreviousDeltas(
+              _FilterForTest(images), _FilterForTest(previous_images)),
+          skip=skip_test_deltas)
 
-    # Organize everything into a single list of (payload, skip) pairs; also, be
-    # sure to fill in a URL for each payload.
-    payloads_skip = []
-    for (do_skip, payloads) in payload_sublists_skip:
-      for payload in payloads:
-        paygen_payload_lib.FillInPayloadUri(payload)
-        payloads_skip.append((payload, do_skip))
+      # Deltas for fsi -> current (test payloads).
+      payload_manager.Add(
+          ['test', 'delta', 'fsi'],
+          self._DiscoverRequiredFromPreviousDeltas(
+              _FilterForTest(images), _FilterForTest(fsi_images)),
+          skip=skip_test_deltas)
 
-    return payloads_skip
+    # Set the payload URIs.
+    for p in payload_manager.Get([]):
+      paygen_payload_lib.FillInPayloadUri(p)
+
+    return payload_manager
 
   def _GeneratePayloads(self, payloads, lock=None):
     """Generate the payloads called for by a list of payload definitions.
@@ -1012,7 +1144,6 @@
     test = test_params.TestConfig(
         self._archive_board,
         suite_name,               # Name of the test (use the suite name).
-        False,                    # Using test images.
         bool(payload.src_image),  # Whether this is a delta.
         src_version,
         payload.tgt_image.version,
@@ -1192,49 +1323,66 @@
     logging.info('No FSIs with artifacts, not scheduling FSI update test.')
     return []
 
-  def _CreatePayloadTests(self, payloads):
+  def _CreatePayloadTests(self, payload_manager):
     """Returns a list of test configurations for a given list of payloads.
 
     Args:
-      payloads: A list of (already generated) build payloads.
+      payload_manager: A PayloadManager instance.
 
     Returns:
       A list of PayloadTest objects defining payload test cases.
     """
     payload_tests = []
-    for payload in payloads:
-      # We are only testing test payloads.
-      if payload.tgt_image.get('image_type', 'signed') == 'signed':
-        continue
 
-      # Distinguish between delta (source version encoded) and full payloads.
-      if payload.src_image is None:
-        # Create a full update test from NMO, if we are newer.
-        if not self._previous_version:
-          logging.warning('No previous build, not testing full update %s from '
-                          'NMO', payload)
-        elif gspaths.VersionGreater(
-            self._previous_version, payload.tgt_image.version):
+    # Pre-fetch lab stable FSIs.
+    lab_stable_fsi_deltas = self._DiscoverAllFsiBuildsForDeltaTesting()
+    lab_stable_fsi_full = self._DiscoverAllFsiBuildsForFullTesting()
+
+    def IsFsiLabStable(fsi_image):
+      for build in lab_stable_fsi_deltas:
+        if all([fsi_image.board == build.board,
+                fsi_image.channel == build.channel,
+                fsi_image.version == build.version,
+                fsi_image.bucket == build.bucket]):
+          return True
+      return False
+
+    # Create full update tests that involve the current build.
+    for p in payload_manager.GetOnly(['test', 'full']):
+
+      # Update tests from previous to current, if we are newer.
+      for p_prev in payload_manager.GetOnly(['test', 'full', 'previous']):
+        if gspaths.VersionGreater(p_prev.tgt_image.version,
+                                  p.tgt_image.version):
           logging.warning(
               'NMO (%s) is newer than target (%s), skipping NMO full '
-              'update test.', self._previous_version, payload)
-        else:
-          payload_tests.append(self.PayloadTest(
-              payload, src_channel=self._build.channel,
-              src_version=self._previous_version))
+              'update test.', p_prev, p)
+          continue
 
-        # Create a full update test from the current version to itself.
         payload_tests.append(self.PayloadTest(
-            payload,
-            src_channel=self._build.channel,
-            src_version=self._build.version))
+            p,
+            src_channel=p_prev.tgt_image.channel,
+            src_version=p_prev.tgt_image.version))
 
-        # Create a full update test from oldest viable FSI.
-        payload_tests += self._CreateFsiPayloadTests(
-            payload, self._DiscoverAllFsiBuilds())
-      else:
-        # Create a delta update test.
-        payload_tests.append(self.PayloadTest(payload))
+      # Update test from current version to itself.
+      payload_tests.append(self.PayloadTest(
+          p,
+          src_channel=self._build.channel,
+          src_version=self._build.version))
+
+      # Update test from the oldest viable FSI.
+      payload_tests += self._CreateFsiPayloadTests(p, lab_stable_fsi_full)
+
+    # Create delta payload tests.
+    for p in payload_manager.Get(['test', 'delta']):
+      # FSI deltas are included only if they are known to be lab stable.
+      if 'fsi' in p.labels and not IsFsiLabStable(p.src_image):
+        logging.warning(
+            'FSI delta payload (%s) is not lab stable, skipping '
+            'delta update test', p)
+        continue
+
+      payload_tests.append(self.PayloadTest(p))
 
     return payload_tests
 
@@ -1277,7 +1425,7 @@
 
         logging.info('Starting: %s', self._build)
 
-        payloads_skip = self._DiscoverRequiredPayloads()
+        payload_manager = self._DiscoverRequiredPayloads()
 
         # Assume we can finish the build until we find a reason we can't.
         can_finish = True
@@ -1289,41 +1437,40 @@
         # URI accordingly. In doing so we're creating a list of all payload
         # objects and their skip/exist attributes. We're also recording whether
         # this run will be skipping any actual work.
-        payloads_attrs = []
-        for payload, skip in payloads_skip:
+        for p in payload_manager.Get([]):
           if self._output_dir:
             # output_dir means we are forcing all payloads to be generated
             # with a new destination.
             result = [os.path.join(self._output_dir,
-                                   os.path.basename(payload.uri))]
+                                   os.path.basename(p.uri))]
             exists = False
           else:
-            result = paygen_payload_lib.FindExistingPayloads(payload)
+            result = paygen_payload_lib.FindExistingPayloads(p)
             exists = bool(result)
 
           if result:
-            paygen_payload_lib.SetPayloadUri(payload, result[0])
-          elif skip:
+            paygen_payload_lib.SetPayloadUri(p, result[0])
+          elif p['skip']:
             can_finish = False
 
-          payloads_attrs.append((payload, skip, exists))
+          p['exists'] = exists
 
         # Display payload generation list, including payload name and whether
         # or not it already exists or will be skipped.
         log_items = []
-        for payload, skip, exists in payloads_attrs:
-          desc = str(payload)
-          if exists:
+        for p in payload_manager.Get([]):
+          desc = str(p)
+          if p['exists']:
             desc += ' (exists)'
-          elif skip:
+          elif p['skip']:
             desc += ' (skipped)'
           log_items.append(desc)
 
         _LogList('All payloads for the build', log_items)
 
         # Generate new payloads.
-        new_payloads = [payload for payload, skip, exists in payloads_attrs
-                        if not (skip or exists)]
+        new_payloads = [p for p in payload_manager.Get([])
+                        if not (p['skip'] or p['exists'])]
         if new_payloads:
           logging.info('Generating %d new payload(s)', len(new_payloads))
           self._GeneratePayloads(new_payloads, build_lock)
@@ -1348,8 +1495,7 @@
 
             # We have a control file directory and all payloads have been
             # generated. Lets create the list of tests to conduct.
-            payload_tests = self._CreatePayloadTests(
-                [payload for payload, _, _ in payloads_attrs])
+            payload_tests = self._CreatePayloadTests(payload_manager)
             if payload_tests:
               logging.info('Initiating %d payload tests', len(payload_tests))
               self._drm(self._AutotestPayloads, payload_tests)
@@ -1413,8 +1559,11 @@
     BoardNotConfigured if the board is unknown.
   """
   # Right now, we just validate that the board exists.
-  if board not in config.GetCompleteBoardSet():
-    raise BoardNotConfigured(board)
+  boards = json.loads(gslib.Cat(BOARDS_URI))
+  for b in boards.get('boards', []):
+    if b['public_codename'] == board:
+      return
+  raise BoardNotConfigured(board)
 
 
 def CreatePayloads(build, work_dir, site_config, dry_run=False,
diff --git a/lib/paygen/paygen_build_lib_unittest.py b/lib/paygen/paygen_build_lib_unittest.py
index 39e7439..f828e14 100644
--- a/lib/paygen/paygen_build_lib_unittest.py
+++ b/lib/paygen/paygen_build_lib_unittest.py
@@ -6,7 +6,6 @@
 
 from __future__ import print_function
 
-import itertools
 import mox
 import os
 import shutil
@@ -38,6 +37,56 @@
 # pylint: disable=protected-access
 
 
+class PayloadManagerTest(cros_test_lib.MockTestCase):
+  """Unittests for the PayloadManager."""
+
+  def testAdd(self):
+    """Test adding payloads to the manager."""
+    pm = paygen_build_lib.PayloadManager()
+
+    labels = ['test']
+    payloads = [gspaths.Payload(tgt_image='foo',
+                                src_image='bar',
+                                uri='baz'),
+                gspaths.Payload(tgt_image='apple',
+                                src_image='orange',
+                                uri='mango')]
+
+    pm.Add(labels, payloads)
+
+    for payload, p in zip(payloads, pm.payloads):
+      self.assertEquals(labels, p.labels)
+      self.assertEquals(payload.tgt_image, p.tgt_image)
+      self.assertEquals(payload.src_image, p.src_image)
+      self.assertEquals(payload.uri, p.uri)
+
+  def testGet(self):
+    """Test retrieving payloads from the manager."""
+    pm = paygen_build_lib.PayloadManager()
+
+    p1 = gspaths.Payload(tgt_image='foo', labels=['foo', 'test'])
+    p2 = gspaths.Payload(tgt_image='bar', labels=['bar', 'test'])
+
+    pm.payloads = [p1, p2]
+
+    self.assertEquals([p1], pm.Get(['foo']))
+    self.assertEquals([p2], pm.Get(['bar']))
+    self.assertEquals([p1, p2], pm.Get(['test']))
+    self.assertEquals([], pm.Get(['foo', 'bar']))
+
+  def testGetOnly(self):
+    """Test retrieving payloads from the manager."""
+    pm = paygen_build_lib.PayloadManager()
+
+    p1 = gspaths.Payload(tgt_image='bar', labels=['bar', 'test'])
+    p2 = gspaths.Payload(tgt_image='bar', labels=['bar', 'test', 'test2'])
+
+    pm.payloads = [p1, p2]
+
+    self.assertEquals([p1, p2], pm.Get(['bar', 'test']))
+    self.assertEquals([p1], pm.GetOnly(['bar', 'test']))
+
+
 class BasePaygenBuildLibTest(cros_test_lib.MoxTempDirTestCase):
   """Base class for testing PaygenBuildLib class."""
 
@@ -321,17 +370,30 @@
         False,
         paygen_build_lib.ImageMissing)
 
-  @unittest.skipIf(not paygen_build_lib.config, 'Internal crostools required.')
-  def testDiscoverActiveFsiBuilds(self):
-    """Using test release.conf values, test _DiscoverActiveFsiBuilds."""
+  def testDiscoverFsiBuildsForDeltas(self):
+    """Using test goldeneye values, test _DiscoverFsiBuildsForDeltas."""
+    # Set up mock goldeneye fsi information.
+    mock_return_fsi = paygen_build_lib.json.dumps(
+        {'fsis':
+         [{u'is_delta_supported': True, u'chrome_os_version': u'2465.105.0',
+           u'board':
+           {u'public_codename': u'valid-board', u'is_active': True},
+           u'is_lab_stable': True, u'chrome_version': u'31.0.1650.61'},
+          {u'is_delta_supported': True, u'chrome_os_version': u'2467.109.0',
+           u'board':
+           {u'public_codename': u'valid-board', u'is_active': True},
+           u'is_lab_stable': False, u'chrome_version': u'31.0.1650.61'},
+          {u'is_delta_supported': False, u'chrome_os_version': u'2913.331.0',
+           u'board':
+           {u'public_codename': u'valid-board', u'is_active': True},
+           u'is_lab_stable': True, u'chrome_version': u'31.0.1650.61'}]
+        }
+    )
 
-    test_config = """
-[valid-board]
-fsi_images: 2913.331.0,2465.105.0
-
-[no-fsi-board]
-"""
-    paygen_build_lib.config.LoadTestConfig(test_config)
+    self.mox.StubOutWithMock(gslib, 'Cat')
+    gslib.Cat(paygen_build_lib.FSI_URI).AndReturn(mock_return_fsi)
+    gslib.Cat(paygen_build_lib.FSI_URI).AndReturn(mock_return_fsi)
+    self.mox.ReplayAll()
 
     # Test a board with FSI values on stable-channel.
     paygen = paygen_build_lib._PaygenBuild(
@@ -341,13 +403,13 @@
         config_lib_unittest.MockSiteConfig())
 
     self.assertEqual(
-        sorted(paygen._DiscoverActiveFsiBuilds()),
+        sorted(paygen._DiscoverFsiBuildsForDeltas()),
         [gspaths.Build(board='valid-board',
                        channel='stable-channel',
                        version='2465.105.0'),
          gspaths.Build(board='valid-board',
                        channel='stable-channel',
-                       version='2913.331.0')])
+                       version='2467.109.0')])
 
     # Test a board without FSI values on stable-channel.
     paygen = paygen_build_lib._PaygenBuild(
@@ -356,7 +418,7 @@
         self.work_dir,
         config_lib_unittest.MockSiteConfig())
 
-    self.assertEqual(paygen._DiscoverActiveFsiBuilds(), [])
+    self.assertEqual(paygen._DiscoverFsiBuildsForDeltas(), [])
 
     # Test a board with FSI values on non-stable-channel.
     paygen = paygen_build_lib._PaygenBuild(
@@ -365,14 +427,43 @@
         self.work_dir,
         config_lib_unittest.MockSiteConfig())
 
-    self.assertEqual(paygen._DiscoverActiveFsiBuilds(), [])
+    self.assertEqual(paygen._DiscoverFsiBuildsForDeltas(), [])
 
-    paygen_build_lib.config.LoadGlobalConfig()
+  def testDiscoverAllFsiBuildsForDeltaTesting(self):
+    """With goldeneye values, test _DiscoverAllFsiBuildsForDeltaTesting."""
+    mock_return_fsi = paygen_build_lib.json.dumps(
+        {'fsis':
+         [{u'is_delta_supported': True, u'chrome_os_version': u'2465.105.0',
+           u'board':
+           {u'public_codename': u'valid-board', u'is_active': True},
+           u'is_lab_stable': True, u'chrome_version': u'31.0.1650.61'},
+          {u'is_delta_supported': True, u'chrome_os_version': u'2467.109.0',
+           u'board':
+           {u'public_codename': u'valid-board', u'is_active': True},
+           u'is_lab_stable': False, u'chrome_version': u'31.0.1650.61'}]
+        }
+    )
+
+    self.mox.StubOutWithMock(gslib, 'Cat')
+    gslib.Cat(paygen_build_lib.FSI_URI).AndReturn(mock_return_fsi)
+    self.mox.ReplayAll()
+
+    # Test that only FSIs marked as is_lab_stable are selected.
+    paygen = paygen_build_lib._PaygenBuild(
+        gspaths.Build(channel='stable-channel', board='valid-board',
+                      version='1.2.3'),
+        self.work_dir,
+        config_lib_unittest.MockSiteConfig())
+
+    self.assertEqual(
+        paygen._DiscoverAllFsiBuildsForDeltaTesting(),
+        [gspaths.Build(board='valid-board',
+                       channel='stable-channel',
+                       version='2465.105.0')])
 
   @cros_test_lib.NetworkTest()
-  @unittest.skipIf(not paygen_build_lib.config, 'Internal crostools required.')
-  def testDiscoverAllFsiBuilds(self):
-    """Using test release.conf values, test _DiscoverActiveFsiBuilds."""
+  def testDiscoverAllFsiBuildsForFullTesting(self):
+    """With goldeneye values, test _DiscoverAllFsiBuildsForFullTesting."""
     paygen = paygen_build_lib._PaygenBuild(
         gspaths.Build(channel='stable-channel', board='x86-alex-he',
                       version='1.2.3'),
@@ -380,22 +471,59 @@
         config_lib_unittest.MockSiteConfig())
 
     # Search for real FSIs for an older/live board.
-    self.assertEqual(paygen._DiscoverAllFsiBuilds(),
+    self.assertEqual(paygen._DiscoverAllFsiBuildsForFullTesting(),
                      ['0.12.433.257', '0.14.811.132', '1412.205.0'])
 
-  @unittest.skipIf(not paygen_build_lib.query, 'Internal crostools required.')
   def testDiscoverNmoBuild(self):
     """Test _DiscoverNmoBuild (N minus One)."""
     paygen = self._GetPaygenBuildInstance()
 
-    self.mox.StubOutWithMock(paygen_build_lib.query, 'FindLatestPublished')
+    # Set up mock goldeneye omaha status information.
+    mock_return_foo = paygen_build_lib.json.dumps(
+        {'omaha_data':
+         [{u'is_mp_keyset': True, u'chrome_version': u'47.0.2514.0',
+           u'keyset': u'foo-mp',
+           u'board':
+           {u'public_codename': u'foo-board', u'is_active': True},
+           u'chrome_os_version': u'7478.0.0', u'channel': u'foo',
+           u'payloads':
+           [{u'max_fraction': False,
+             u'name': u'foo-channel/foo/7478.0.0/payloads/'
+                      u'chromeos_7475.0.0-7478.0.0_foo_foo'
+                      u'-channel_delta_mp.bin-877f148a914c1cdbe2'
+                      u'42aa4247a1d135.signed', u'fraction': 1.0},
+            {u'max_fraction': False,
+             u'name': u'foo-channel/foo/7478.0.0/payloads/'
+                      u'chromeos_7478.0.0_foo_foo-channel_'
+                      u'full_mp.bin-fddc0ae18c9845325c13704ee00b'
+                      u'd0a4.signed', u'fraction': 1.0}]}]
+        }
+    )
 
-    # Set up the test replay script.
-    paygen_build_lib.query.FindLatestPublished(
-        'foo-channel', 'foo-board').AndReturn('1.0.0')
+    mock_return_not_foo = {
+        'omaha_data':
+        [{u'is_mp_keyset': True, u'chrome_version': u'47.0.2514.0',
+          u'keyset': u'notfoo-mp',
+          u'board':
+          {u'public_codename': u'notfoo-board', u'is_active': True},
+          u'chrome_os_version': u'7478.0.0', u'channel': u'notfoo-channel',
+          u'payloads':
+          [{u'max_fraction': False,
+            u'name': u'notfoo-channel/notfoo/7478.0.0/payloads/'
+                     u'chromeos_7475.0.0-7478.0.0_notfoo_notfoo'
+                     u'-channel_delta_mp.bin-877f148a914c1cdbe2'
+                     u'42aa4247a1d135.signed', u'fraction': 1.0},
+           {u'max_fraction': False,
+            u'name': u'notfoo-channel/notfoo/7478.0.0/payloads/'
+                     u'chromeos_7478.0.0_notfoo_notfoo-channel_'
+                     u'full_mp.bin-fddc0ae18c9845325c13704ee00b'
+                     u'd0a4.signed', u'fraction': 1.0}]}]
+        }
 
-    paygen_build_lib.query.FindLatestPublished(
-        'foo-channel', 'foo-board').AndReturn(None)
+    # Mock out the actual call to gslib.Cat that occurs before a cached
+    # result exists in the paygen object.
+    self.mox.StubOutWithMock(gslib, 'Cat')
+    gslib.Cat(paygen_build_lib.OMAHA_URI).AndReturn(mock_return_foo)
 
     # Run the test verification.
     self.mox.ReplayAll()
@@ -404,8 +532,9 @@
                      [gspaths.Build(bucket='crt',
                                     channel='foo-channel',
                                     board='foo-board',
-                                    version='1.0.0')])
+                                    version='7478.0.0')])
 
+    paygen.cachedOmahaJson = mock_return_not_foo
     self.assertEqual(paygen._DiscoverNmoBuild(), [])
 
   def testDiscoverRequiredFullPayloads(self):
@@ -501,7 +630,7 @@
 
     self.mox.StubOutWithMock(paygen, '_DiscoverImages')
     self.mox.StubOutWithMock(paygen, '_DiscoverNmoBuild')
-    self.mox.StubOutWithMock(paygen, '_DiscoverActiveFsiBuilds')
+    self.mox.StubOutWithMock(paygen, '_DiscoverFsiBuildsForDeltas')
 
     paygen.BUILD_DISCOVER_RETRY_SLEEP = 0
 
@@ -531,7 +660,7 @@
     self.mox.StubOutWithMock(paygen, '_DiscoverImages')
     self.mox.StubOutWithMock(paygen, '_DiscoverTestImageArchives')
     self.mox.StubOutWithMock(paygen, '_DiscoverNmoBuild')
-    self.mox.StubOutWithMock(paygen, '_DiscoverActiveFsiBuilds')
+    self.mox.StubOutWithMock(paygen, '_DiscoverFsiBuildsForDeltas')
     self.mox.StubOutWithMock(paygen_payload_lib, 'DefaultPayloadUri')
 
     nmo_build = gspaths.Build(bucket='crt',
@@ -557,14 +686,14 @@
     paygen._DiscoverImages(paygen._build).AndReturn(self.images)
     paygen._DiscoverTestImageArchives(paygen._build).AndReturn(
         [self.test_image])
+    paygen._DiscoverFsiBuildsForDeltas().AndReturn([fsi1_build, fsi2_build])
     paygen._DiscoverNmoBuild().AndReturn([nmo_build])
-    paygen._DiscoverActiveFsiBuilds().AndReturn([fsi1_build, fsi2_build])
-    paygen._DiscoverImages(nmo_build).AndReturn(nmo_images)
-    paygen._DiscoverTestImageArchives(nmo_build).AndReturn([nmo_test_image])
     paygen._DiscoverImages(fsi1_build).AndReturn(fsi1_images)
     paygen._DiscoverTestImageArchives(fsi1_build).AndReturn([fsi1_test_image])
     paygen._DiscoverImages(fsi2_build).AndReturn(fsi2_images)
     paygen._DiscoverTestImageArchives(fsi2_build).AndReturn([fsi2_test_image])
+    paygen._DiscoverImages(nmo_build).AndReturn(nmo_images)
+    paygen._DiscoverTestImageArchives(nmo_build).AndReturn([nmo_test_image])
 
     # Simplify the output URIs, so it's easy to check them below.
     paygen_payload_lib.DefaultPayloadUri(
@@ -573,63 +702,89 @@
     # Run the test verification.
     self.mox.ReplayAll()
 
-    results = paygen._DiscoverRequiredPayloads()
+    self.maxDiff = None
 
-    expected = [gspaths.Payload(tgt_image=self.basic_image, uri=output_uri),
-                gspaths.Payload(tgt_image=self.npo_image, uri=output_uri),
-                gspaths.Payload(tgt_image=self.premp_image, uri=output_uri),
-                gspaths.Payload(tgt_image=self.premp_npo_image, uri=output_uri),
+    payload_manager = paygen._DiscoverRequiredPayloads()
+
+    expected = [gspaths.Payload(tgt_image=self.basic_image, uri=output_uri,
+                                labels=['full']),
+                gspaths.Payload(tgt_image=self.npo_image, uri=output_uri,
+                                labels=['full']),
+                gspaths.Payload(tgt_image=self.premp_image, uri=output_uri,
+                                labels=['full']),
+                gspaths.Payload(tgt_image=self.premp_npo_image, uri=output_uri,
+                                labels=['full']),
+
+                gspaths.Payload(tgt_image=nmo_images[0], uri=output_uri,
+                                labels=['full', 'previous']),
+                gspaths.Payload(tgt_image=nmo_images[1], uri=output_uri,
+                                labels=['full', 'previous']),
                 # NPO Deltas
                 gspaths.Payload(tgt_image=self.npo_image,
                                 src_image=self.basic_image,
-                                uri=output_uri),
+                                uri=output_uri,
+                                labels=['delta', 'npo']),
                 gspaths.Payload(tgt_image=self.premp_npo_image,
                                 src_image=self.premp_image,
-                                uri=output_uri),
+                                uri=output_uri,
+                                labels=['delta', 'npo']),
                 # NMO Delta
                 gspaths.Payload(tgt_image=self.basic_image,
                                 src_image=nmo_images[0],
-                                uri=output_uri),
+                                uri=output_uri,
+                                labels=['delta', 'previous']),
                 gspaths.Payload(tgt_image=self.premp_image,
                                 src_image=nmo_images[1],
-                                uri=output_uri),
+                                uri=output_uri,
+                                labels=['delta', 'previous']),
                 # FSI Deltas
                 gspaths.Payload(tgt_image=self.basic_image,
                                 src_image=fsi1_images[0],
-                                uri=output_uri),
+                                uri=output_uri,
+                                labels=['delta', 'fsi']),
                 gspaths.Payload(tgt_image=self.premp_image,
                                 src_image=fsi1_images[1],
-                                uri=output_uri),
+                                uri=output_uri,
+                                labels=['delta', 'fsi']),
                 gspaths.Payload(tgt_image=self.basic_image,
                                 src_image=fsi2_images[0],
-                                uri=output_uri),
+                                uri=output_uri,
+                                labels=['delta', 'fsi']),
                 gspaths.Payload(tgt_image=self.premp_image,
                                 src_image=fsi2_images[1],
-                                uri=output_uri),
+                                uri=output_uri,
+                                labels=['delta', 'fsi']),
 
                 # Test full payload.
                 gspaths.Payload(tgt_image=self.test_image,
-                                uri=output_uri),
+                                uri=output_uri,
+                                labels=['test', 'full']),
+                gspaths.Payload(tgt_image=nmo_test_image,
+                                uri=output_uri,
+                                labels=['test', 'full', 'previous']),
 
                 # Test NPO delta.
                 gspaths.Payload(tgt_image=self.test_image,
                                 src_image=self.test_image,
-                                uri=output_uri),
+                                uri=output_uri,
+                                labels=['test', 'delta', 'npo']),
 
                 # Test NMO delta.
                 gspaths.Payload(tgt_image=self.test_image,
                                 src_image=nmo_test_image,
-                                uri=output_uri),
+                                uri=output_uri,
+                                labels=['test', 'delta', 'previous']),
 
                 # Test FSI deltas.
                 gspaths.Payload(tgt_image=self.test_image,
                                 src_image=fsi1_test_image,
-                                uri=output_uri),
+                                uri=output_uri,
+                                labels=['test', 'delta', 'fsi']),
                 gspaths.Payload(tgt_image=self.test_image,
                                 src_image=fsi2_test_image,
-                                uri=output_uri)]
-    expected = zip(expected, itertools.repeat(False))
-
+                                uri=output_uri,
+                                labels=['test', 'delta', 'fsi'])]
+    results = payload_manager.Get([])
     self.assertItemsEqual(sorted(results), sorted(expected))
 
   def testDiscoverRequiredPayloadsPreviousSkipped(self):
@@ -646,7 +801,7 @@
     self.mox.StubOutWithMock(paygen, '_DiscoverImages')
     self.mox.StubOutWithMock(paygen, '_DiscoverTestImageArchives')
     self.mox.StubOutWithMock(paygen, '_DiscoverNmoBuild')
-    self.mox.StubOutWithMock(paygen, '_DiscoverActiveFsiBuilds')
+    self.mox.StubOutWithMock(paygen, '_DiscoverFsiBuildsForDeltas')
     self.mox.StubOutWithMock(paygen_payload_lib, 'DefaultPayloadUri')
 
     nmo_build = gspaths.Build(bucket='crt',
@@ -670,15 +825,15 @@
     paygen._DiscoverImages(paygen._build).AndReturn(self.images)
     paygen._DiscoverTestImageArchives(paygen._build).AndReturn(
         [self.test_image])
+    paygen._DiscoverFsiBuildsForDeltas().AndReturn([fsi1_build, fsi2_build])
     paygen._DiscoverNmoBuild().AndReturn([nmo_build])
-    paygen._DiscoverActiveFsiBuilds().AndReturn([fsi1_build, fsi2_build])
-    paygen._DiscoverImages(nmo_build).AndRaise(
-        paygen_build_lib.ImageMissing('nmo build is missing some image'))
-    # _DiscoverTestImageArchives(nmo_build) should NOT be called.
     paygen._DiscoverImages(fsi1_build).AndReturn(fsi1_images)
     paygen._DiscoverTestImageArchives(fsi1_build).AndReturn([fsi1_test_image])
     paygen._DiscoverImages(fsi2_build).AndReturn(fsi2_images)
     paygen._DiscoverTestImageArchives(fsi2_build).AndReturn([fsi2_test_image])
+    paygen._DiscoverImages(nmo_build).AndRaise(
+        paygen_build_lib.ImageMissing('nmo build is missing some image'))
+    # _DiscoverTestImageArchives(nmo_build) should NOT be called.
 
     # Simplify the output URIs, so it's easy to check them below.
     paygen_payload_lib.DefaultPayloadUri(
@@ -687,53 +842,66 @@
     # Run the test verification.
     self.mox.ReplayAll()
 
-    results = paygen._DiscoverRequiredPayloads()
+    payload_manager = paygen._DiscoverRequiredPayloads()
 
     # IMPORTANT: we intentionally omit the NMO payload from the expected list
     # of payloads as it is a duplicate of one of the FSIs.
-    expected = [gspaths.Payload(tgt_image=self.basic_image, uri=output_uri),
-                gspaths.Payload(tgt_image=self.npo_image, uri=output_uri),
-                gspaths.Payload(tgt_image=self.premp_image, uri=output_uri),
-                gspaths.Payload(tgt_image=self.premp_npo_image, uri=output_uri),
+    expected = [gspaths.Payload(tgt_image=self.basic_image, uri=output_uri,
+                                labels=['full']),
+                gspaths.Payload(tgt_image=self.npo_image, uri=output_uri,
+                                labels=['full']),
+                gspaths.Payload(tgt_image=self.premp_image, uri=output_uri,
+                                labels=['full']),
+                gspaths.Payload(tgt_image=self.premp_npo_image, uri=output_uri,
+                                labels=['full']),
                 # NPO Deltas
                 gspaths.Payload(tgt_image=self.npo_image,
                                 src_image=self.basic_image,
-                                uri=output_uri),
+                                uri=output_uri,
+                                labels=['delta', 'npo']),
                 gspaths.Payload(tgt_image=self.premp_npo_image,
                                 src_image=self.premp_image,
-                                uri=output_uri),
+                                uri=output_uri,
+                                labels=['delta', 'npo']),
                 # FSI Deltas
                 gspaths.Payload(tgt_image=self.basic_image,
                                 src_image=fsi1_images[0],
-                                uri=output_uri),
+                                uri=output_uri,
+                                labels=['delta', 'fsi']),
                 gspaths.Payload(tgt_image=self.premp_image,
                                 src_image=fsi1_images[1],
-                                uri=output_uri),
+                                uri=output_uri,
+                                labels=['delta', 'fsi']),
                 gspaths.Payload(tgt_image=self.basic_image,
                                 src_image=fsi2_images[0],
-                                uri=output_uri),
+                                uri=output_uri,
+                                labels=['delta', 'fsi']),
                 gspaths.Payload(tgt_image=self.premp_image,
                                 src_image=fsi2_images[1],
-                                uri=output_uri),
+                                uri=output_uri,
+                                labels=['delta', 'fsi']),
 
                 # Test full payload.
                 gspaths.Payload(tgt_image=self.test_image,
-                                uri=output_uri),
+                                uri=output_uri,
+                                labels=['test', 'full']),
 
                 # Test NPO delta.
                 gspaths.Payload(tgt_image=self.test_image,
                                 src_image=self.test_image,
-                                uri=output_uri),
+                                uri=output_uri,
+                                labels=['test', 'delta', 'npo']),
 
                 # Test FSI deltas.
                 gspaths.Payload(tgt_image=self.test_image,
                                 src_image=fsi1_test_image,
-                                uri=output_uri),
+                                uri=output_uri,
+                                labels=['test', 'delta', 'fsi']),
                 gspaths.Payload(tgt_image=self.test_image,
                                 src_image=fsi2_test_image,
-                                uri=output_uri)]
-    expected = zip(expected, itertools.repeat(False))
-
+                                uri=output_uri,
+                                labels=['test', 'delta', 'fsi'])]
+    results = payload_manager.Get([])
     self.assertItemsEqual(sorted(results), sorted(expected))
 
   def testDiscoverRequiredPayloadsNmoIsAlsoFsi(self):
@@ -746,7 +914,7 @@
     self.mox.StubOutWithMock(paygen, '_DiscoverImages')
     self.mox.StubOutWithMock(paygen, '_DiscoverTestImageArchives')
     self.mox.StubOutWithMock(paygen, '_DiscoverNmoBuild')
-    self.mox.StubOutWithMock(paygen, '_DiscoverActiveFsiBuilds')
+    self.mox.StubOutWithMock(paygen, '_DiscoverFsiBuildsForDeltas')
     self.mox.StubOutWithMock(paygen_payload_lib, 'DefaultPayloadUri')
 
     nmo_build = gspaths.Build(bucket='crt',
@@ -770,7 +938,7 @@
     paygen._DiscoverImages(paygen._build).AndReturn(self.images)
     paygen._DiscoverTestImageArchives(paygen._build).AndReturn(
         [self.test_image])
-    paygen._DiscoverActiveFsiBuilds().AndReturn([fsi1_build, fsi2_build])
+    paygen._DiscoverFsiBuildsForDeltas().AndReturn([fsi1_build, fsi2_build])
     paygen._DiscoverNmoBuild().AndReturn([nmo_build])
     paygen._DiscoverImages(fsi1_build).AndReturn(fsi1_images)
     paygen._DiscoverImages(fsi2_build).AndReturn(fsi2_images)
@@ -784,52 +952,65 @@
     # Run the test verification.
     self.mox.ReplayAll()
 
-    results = paygen._DiscoverRequiredPayloads()
+    payload_manager = paygen._DiscoverRequiredPayloads()
 
-    expected = [gspaths.Payload(tgt_image=self.basic_image, uri=output_uri),
-                gspaths.Payload(tgt_image=self.npo_image, uri=output_uri),
-                gspaths.Payload(tgt_image=self.premp_image, uri=output_uri),
-                gspaths.Payload(tgt_image=self.premp_npo_image, uri=output_uri),
+    expected = [gspaths.Payload(tgt_image=self.basic_image, uri=output_uri,
+                                labels=['full']),
+                gspaths.Payload(tgt_image=self.npo_image, uri=output_uri,
+                                labels=['full']),
+                gspaths.Payload(tgt_image=self.premp_image, uri=output_uri,
+                                labels=['full']),
+                gspaths.Payload(tgt_image=self.premp_npo_image, uri=output_uri,
+                                labels=['full']),
                 # NPO Deltas
                 gspaths.Payload(tgt_image=self.npo_image,
                                 src_image=self.basic_image,
-                                uri=output_uri),
+                                uri=output_uri,
+                                labels=['delta', 'npo']),
                 gspaths.Payload(tgt_image=self.premp_npo_image,
                                 src_image=self.premp_image,
-                                uri=output_uri),
+                                uri=output_uri,
+                                labels=['delta', 'npo']),
                 # FSI Deltas
                 gspaths.Payload(tgt_image=self.basic_image,
                                 src_image=fsi1_images[0],
-                                uri=output_uri),
+                                uri=output_uri,
+                                labels=['delta', 'fsi']),
                 gspaths.Payload(tgt_image=self.premp_image,
                                 src_image=fsi1_images[1],
-                                uri=output_uri),
+                                uri=output_uri,
+                                labels=['delta', 'fsi']),
                 gspaths.Payload(tgt_image=self.basic_image,
                                 src_image=fsi2_images[0],
-                                uri=output_uri),
+                                uri=output_uri,
+                                labels=['delta', 'fsi']),
                 gspaths.Payload(tgt_image=self.premp_image,
                                 src_image=fsi2_images[1],
-                                uri=output_uri),
+                                uri=output_uri,
+                                labels=['delta', 'fsi']),
 
                 # Test full payload.
                 gspaths.Payload(tgt_image=self.test_image,
-                                uri=output_uri),
+                                uri=output_uri,
+                                labels=['test', 'full']),
 
                 # Test NPO delta.
                 gspaths.Payload(tgt_image=self.test_image,
                                 src_image=self.test_image,
-                                uri=output_uri),
+                                uri=output_uri,
+                                labels=['test', 'delta', 'npo']),
 
                 # Test FSI deltas.
                 gspaths.Payload(tgt_image=self.test_image,
                                 src_image=fsi1_test_image,
-                                uri=output_uri),
+                                uri=output_uri,
+                                labels=['test', 'delta', 'fsi']),
                 gspaths.Payload(tgt_image=self.test_image,
                                 src_image=fsi2_test_image,
-                                uri=output_uri)]
+                                uri=output_uri,
+                                labels=['test', 'delta', 'fsi'])]
 
-    expected = zip(expected, itertools.repeat(False))
-
+    results = payload_manager.Get([])
     self.assertItemsEqual(sorted(results), sorted(expected))
 
   def testFindFullTestPayloads(self):
@@ -1054,17 +1235,18 @@
     finished_uri = paygen._GetFlagURI(gspaths.ChromeosReleases.FINISHED)
 
     lock = self.mox.CreateMockAnything()
-    payload = 'foo'
+    payload = gspaths.Payload(tgt_image='foo')
     payload_list = [payload]
-    payload_skip_list = [(payload, False)]
+    payload_manager = paygen_build_lib.PayloadManager()
+    payload_manager.Add([], payload_list)
+
     mock_exception = Exception()
 
     gslock.Lock(lock_uri, dry_run=False).AndReturn(lock)
     lock.__enter__().AndReturn(lock)
     gslib.Exists(skip_uri).AndReturn(False)
     gslib.Exists(finished_uri).AndReturn(False)
-    paygen._DiscoverRequiredPayloads(
-        ).AndReturn(payload_skip_list)
+    paygen._DiscoverRequiredPayloads().AndReturn(payload_manager)
     self.mox.StubOutWithMock(paygen_payload_lib, 'FindExistingPayloads')
     paygen_payload_lib.FindExistingPayloads(payload).AndReturn([])
     paygen._GeneratePayloads(payload_list, lock).AndRaise(mock_exception)
@@ -1086,22 +1268,23 @@
     finished_uri = paygen._GetFlagURI(gspaths.ChromeosReleases.FINISHED)
 
     lock = self.mox.CreateMockAnything()
-    payload = 'foo'
+    payload = gspaths.Payload(tgt_image='foo')
     payload_list = [payload]
-    payload_skip_list = [(payload, False)]
+
+    payload_manager = paygen_build_lib.PayloadManager()
+    payload_manager.Add([], payload_list)
 
     gslock.Lock(lock_uri, dry_run=False).AndReturn(lock)
     lock.__enter__().AndReturn(lock)
     gslib.Exists(skip_uri).AndReturn(False)
     gslib.Exists(finished_uri).AndReturn(False)
-    paygen._DiscoverRequiredPayloads(
-        ).AndReturn(payload_skip_list)
+    paygen._DiscoverRequiredPayloads().AndReturn(payload_manager)
     self.mox.StubOutWithMock(paygen_payload_lib, 'FindExistingPayloads')
     paygen_payload_lib.FindExistingPayloads(payload).AndReturn([])
     paygen._GeneratePayloads(payload_list, lock)
     paygen._MapToArchive('foo-board', '1.2.3').AndReturn(
         ('archive_board', 'archive_build', 'archive_build_uri'))
-    paygen._CreatePayloadTests(['foo']).AndReturn(['Test Payloads'])
+    paygen._CreatePayloadTests(payload_manager).AndReturn(['Test Payloads'])
     paygen._AutotestPayloads(['Test Payloads'])
 
     paygen._CleanupBuild()
@@ -1125,16 +1308,17 @@
     lock = self.mox.CreateMockAnything()
     self.mox.StubOutWithMock(paygen_payload_lib, 'FindExistingPayloads')
     self.mox.StubOutWithMock(paygen_payload_lib, 'SetPayloadUri')
-    payload_existing = 'foo'
-    payload_new = 'bar'
-    payload_list = [(payload_existing, False), (payload_new, False)]
+    payload_existing = gspaths.Payload(tgt_image='foo')
+    payload_new = gspaths.Payload(tgt_image='bar')
+
+    payload_manager = paygen_build_lib.PayloadManager()
+    payload_manager.Add([], [payload_existing, payload_new])
 
     gslock.Lock(lock_uri, dry_run=False).AndReturn(lock)
     lock.__enter__().AndReturn(lock)
     gslib.Exists(skip_uri).AndReturn(False)
     gslib.Exists(finished_uri).AndReturn(False)
-    paygen._DiscoverRequiredPayloads(
-        ).AndReturn(payload_list)
+    paygen._DiscoverRequiredPayloads().AndReturn(payload_manager)
     paygen_payload_lib.FindExistingPayloads(payload_existing).AndReturn(
         [payload_existing])
     paygen_payload_lib.FindExistingPayloads(payload_new).AndReturn([])
@@ -1142,7 +1326,7 @@
     paygen._GeneratePayloads([payload_new], lock)
     paygen._MapToArchive('foo-board', '1.2.3').AndReturn(
         ('archive_board', 'archive_build', 'archive_build_uri'))
-    paygen._CreatePayloadTests(['foo', 'bar']).AndReturn(['Test Payloads'])
+    paygen._CreatePayloadTests(payload_manager).AndReturn(['Test Payloads'])
     paygen._AutotestPayloads(['Test Payloads'])
     gslib.CreateWithContents(finished_uri, mox.IgnoreArg())
     paygen._CleanupBuild()
@@ -1164,16 +1348,17 @@
     finished_uri = paygen._GetFlagURI(gspaths.ChromeosReleases.FINISHED)
 
     lock = self.mox.CreateMockAnything()
-    payload = 'foo'
+    payload = gspaths.Payload(tgt_image='foo')
     payload_list = [payload]
-    payload_skip_list = [(payload, False)]
+
+    payload_manager = paygen_build_lib.PayloadManager()
+    payload_manager.Add([], payload_list)
 
     gslock.Lock(lock_uri, dry_run=False).AndReturn(lock)
     lock.__enter__().AndReturn(lock)
     gslib.Exists(skip_uri).AndReturn(False)
     gslib.Exists(finished_uri).AndReturn(False)
-    paygen._DiscoverRequiredPayloads(
-        ).AndReturn(payload_skip_list)
+    paygen._DiscoverRequiredPayloads().AndReturn(payload_manager)
     self.mox.StubOutWithMock(paygen_payload_lib, 'FindExistingPayloads')
     paygen_payload_lib.FindExistingPayloads(payload).AndReturn([])
     paygen._GeneratePayloads(payload_list, lock)
@@ -1190,7 +1375,8 @@
   def setupCreatePayloadTests(self):
     paygen = self._GetPaygenBuildInstance()
 
-    self.mox.StubOutWithMock(paygen, '_DiscoverAllFsiBuilds')
+    self.mox.StubOutWithMock(paygen, '_DiscoverAllFsiBuildsForDeltaTesting')
+    self.mox.StubOutWithMock(paygen, '_DiscoverAllFsiBuildsForFullTesting')
     self.mox.StubOutWithMock(paygen, '_FindFullTestPayloads')
 
     return paygen
@@ -1198,12 +1384,18 @@
   def testCreatePayloadTestsEmpty(self):
 
     payloads = []
+    payload_manager = paygen_build_lib.PayloadManager()
+    payload_manager.Add([], payloads)
+
     paygen = self.setupCreatePayloadTests()
 
+    paygen._DiscoverAllFsiBuildsForDeltaTesting().AndReturn([])
+    paygen._DiscoverAllFsiBuildsForFullTesting().AndReturn([])
+
     # Run the test verification.
     self.mox.ReplayAll()
 
-    expected = paygen._CreatePayloadTests(payloads)
+    expected = paygen._CreatePayloadTests(payload_manager)
     self.assertEqual(expected, [])
 
   def testCreatePayloadTestsPopulated(self):
@@ -1212,10 +1404,16 @@
         gspaths.Payload(tgt_image=self.test_image),
         gspaths.Payload(tgt_image=self.prev_image, src_image=self.test_image)
     ]
+
+    payload_manager = paygen_build_lib.PayloadManager()
+    payload_manager.Add(['test', 'full'], [payloads[0]])
+    payload_manager.Add(['test', 'delta'], [payloads[1]])
+
     paygen = self.setupCreatePayloadTests()
 
     # We search for FSIs once for each full payload.
-    paygen._DiscoverAllFsiBuilds().AndReturn(['0.9.9', '1.0.0'])
+    paygen._DiscoverAllFsiBuildsForDeltaTesting().AndReturn(['0.9.9', '1.0.0'])
+    paygen._DiscoverAllFsiBuildsForFullTesting().AndReturn(['0.9.9', '1.0.0'])
     paygen._FindFullTestPayloads('stable-channel', '0.9.9').AndReturn(False)
     paygen._FindFullTestPayloads('stable-channel', '1.0.0').AndReturn(True)
 
@@ -1224,16 +1422,65 @@
 
     self.maxDiff = None
 
-    expected = paygen._CreatePayloadTests(payloads)
+    labelled_payloads = [
+        gspaths.Payload(tgt_image=self.test_image, labels=['test', 'full']),
+        gspaths.Payload(tgt_image=self.prev_image, src_image=self.test_image,
+                        labels=['test', 'delta'])
+    ]
+    expected = paygen._CreatePayloadTests(payload_manager)
     self.assertEqual(expected, [
         paygen.PayloadTest(
-            payloads[0], src_channel='foo-channel', src_version='1.2.3'),
+            labelled_payloads[0], src_channel='foo-channel',
+            src_version='1.2.3'),
         paygen.PayloadTest(
-            payloads[0], src_channel='stable-channel', src_version='1.0.0'),
+            labelled_payloads[0], src_channel='stable-channel',
+            src_version='1.0.0'),
         paygen.PayloadTest(
-            payloads[1]),
+            labelled_payloads[1]),
     ])
 
+  def testCreatePayloadTestsLabStableFsiDelta(self):
+    payloads = [
+        gspaths.Payload(tgt_image=self.prev_image, src_image=self.test_image)
+    ]
+
+    payload_manager = paygen_build_lib.PayloadManager()
+    payload_manager.Add(['test', 'delta', 'fsi'], payloads)
+
+    paygen = self.setupCreatePayloadTests()
+
+    paygen._DiscoverAllFsiBuildsForDeltaTesting().AndReturn([self.foo_build])
+    paygen._DiscoverAllFsiBuildsForFullTesting().AndReturn([])
+
+    self.mox.ReplayAll()
+
+    labelled_payloads = [
+        gspaths.Payload(tgt_image=self.prev_image, src_image=self.test_image,
+                        labels=['test', 'delta', 'fsi'])
+    ]
+    expected = paygen._CreatePayloadTests(payload_manager)
+    self.assertEquals(expected, [paygen.PayloadTest(labelled_payloads[0])])
+
+  def testCreatePayloadTestsLabUnstableFsiDelta(self):
+    payloads = [
+        gspaths.Payload(tgt_image=self.prev_image, src_image=self.test_image)
+    ]
+
+    payload_manager = paygen_build_lib.PayloadManager()
+    payload_manager.Add(['test', 'delta', 'fsi'], payloads)
+
+    paygen = self.setupCreatePayloadTests()
+
+    paygen._DiscoverAllFsiBuildsForDeltaTesting().AndReturn(
+        [gspaths.Build(bucket='crt', channel='not-foo-channel',
+                       board='foo-board', version='1.2.3')]
+    )
+    paygen._DiscoverAllFsiBuildsForFullTesting().AndReturn([])
+
+    self.mox.ReplayAll()
+
+    expected = paygen._CreatePayloadTests(payload_manager)
+    self.assertEquals(expected, [])
 
   def testFindControlFileDir(self):
     """Test that we find control files in the proper directory."""
@@ -1250,8 +1497,6 @@
     self.assertTrue(result.startswith(
         os.path.join(self.tempdir, 'paygen_build-control_files')))
 
-  @unittest.skipIf(not paygen_build_lib.config,
-                   'Internal crostools repository needed.')
   @unittest.skipIf(not paygen_build_lib.test_control,
                    'Autotest repository needed.')
   def testEmitControlFile(self):
@@ -1308,13 +1553,12 @@
     control_contents = osutils.ReadFile(cf)
 
     self.assertEqual(control_contents, '''name = 'paygen_foo'
-image_type = 'test'
 update_type = 'delta'
 source_release = '1.2.3'
 target_release = '1.2.3'
-source_image_uri = 'gs://foo/bar.tar.bz2'
 target_payload_uri = 'None'
 SUITE = 'paygen_foo'
+source_payload_uri = 'gs://foo/bar.tar.bz2'
 source_archive_uri = 'gs://chromeos-releases/foo-channel/foo-board/1.2.3'
 
 AUTHOR = "Chromium OS"
@@ -1526,11 +1770,25 @@
 
   def testValidateBoardConfig(self):
     """Test ValidateBoardConfig."""
+    mock_return = paygen_build_lib.json.dumps(
+        {'boards':
+         [{u'omaha_config_name': u'autoupdate-ascii-memento.config',
+           u'public_codename': u'x86-mario',
+           u'hwid_match': u'IEC MARIO FISH 2330|IEC MARIO FISH 2330 DEV|'
+                          u'IEC MARIO PONY 6101|IEC MARIO PONY DVT 8784|'
+                          u'IEC MARIO PONY EVT 3495|IEC MARIO PONY TEST 6101',
+           u'is_active': True,
+           u'app_id': u'{87efface-864d-49a5-9bb3-4b050a7c227a}',
+           u'config_name': u'cr48', u'is_test_blacklist': False,
+           u'omaha_ping_hwid_match': u'IEC MARIO FISH 2330 DEV',
+           u'is_in_canary_release': True}]
+        }
+    )
 
-    # If we are running on an external builder, we can't see the config.
-    # Without the config, we can't validate.
-    if not paygen_build_lib.config:
-      return
+    self.mox.StubOutWithMock(gslib, 'Cat')
+    gslib.Cat(paygen_build_lib.BOARDS_URI).AndReturn(mock_return)
+    gslib.Cat(paygen_build_lib.BOARDS_URI).AndReturn(mock_return)
+    self.mox.ReplayAll()
 
     # Test a known board works.
     paygen_build_lib.ValidateBoardConfig('x86-mario')
@@ -1705,7 +1963,7 @@
     self.mox.StubOutWithMock(paygen, '_DiscoverImages')
     self.mox.StubOutWithMock(paygen, '_DiscoverTestImageArchives')
     self.mox.StubOutWithMock(paygen, '_DiscoverNmoBuild')
-    self.mox.StubOutWithMock(paygen, '_DiscoverActiveFsiBuilds')
+    self.mox.StubOutWithMock(paygen, '_DiscoverFsiBuildsForDeltas')
     self.mox.StubOutWithMock(paygen_payload_lib, 'DefaultPayloadUri')
 
     nmo_build = gspaths.Build(bucket='crt',
@@ -1722,7 +1980,6 @@
                                version='1.1.0')
 
     nmo_images = self._GetBuildImages(nmo_build)
-    nmo_test_image = self._GetBuildTestImage(nmo_build)
     fsi1_images = self._GetBuildImages(fsi1_build)
     fsi1_test_image = self._GetBuildTestImage(fsi1_build)
     fsi2_images = self._GetBuildImages(fsi2_build)
@@ -1730,14 +1987,14 @@
 
     paygen._DiscoverImages(paygen._build).AndReturn(self.images)
     paygen._DiscoverTestImageArchives(paygen._build).AndReturn([])
-    paygen._DiscoverNmoBuild().AndReturn([nmo_build])
-    paygen._DiscoverActiveFsiBuilds().AndReturn([fsi1_build, fsi2_build])
-    paygen._DiscoverImages(nmo_build).AndReturn(nmo_images)
-    paygen._DiscoverTestImageArchives(nmo_build).AndReturn([nmo_test_image])
+    paygen._DiscoverFsiBuildsForDeltas().AndReturn([fsi1_build, fsi2_build])
     paygen._DiscoverImages(fsi1_build).AndReturn(fsi1_images)
     paygen._DiscoverTestImageArchives(fsi1_build).AndReturn([fsi1_test_image])
     paygen._DiscoverImages(fsi2_build).AndReturn(fsi2_images)
     paygen._DiscoverTestImageArchives(fsi2_build).AndReturn([fsi2_test_image])
+    paygen._DiscoverNmoBuild().AndReturn([nmo_build])
+    paygen._DiscoverImages(nmo_build).AndReturn(nmo_images)
+    paygen._DiscoverTestImageArchives(nmo_build).AndReturn([])
 
     # Simplify the output URIs, so it's easy to check them below.
     paygen_payload_lib.DefaultPayloadUri(
@@ -1746,35 +2003,53 @@
     # Run the test verification.
     self.mox.ReplayAll()
 
-    results = paygen._DiscoverRequiredPayloads()
+    self.maxDiff = None
+
+    payload_manager = paygen._DiscoverRequiredPayloads()
 
     expected = [
-        gspaths.Payload(tgt_image=self.basic_image, uri=output_uri),
-        gspaths.Payload(tgt_image=self.npo_image, uri=output_uri),
-        gspaths.Payload(tgt_image=self.premp_image, uri=output_uri),
-        gspaths.Payload(tgt_image=self.premp_npo_image, uri=output_uri),
+        gspaths.Payload(tgt_image=self.basic_image, uri=output_uri,
+                        labels=['full']),
+        gspaths.Payload(tgt_image=self.npo_image, uri=output_uri,
+                        labels=['full']),
+        gspaths.Payload(tgt_image=self.premp_image, uri=output_uri,
+                        labels=['full']),
+        gspaths.Payload(tgt_image=self.premp_npo_image, uri=output_uri,
+                        labels=['full']),
+
+        gspaths.Payload(tgt_image=nmo_images[0], uri=output_uri,
+                        labels=['full', 'previous']),
+        gspaths.Payload(tgt_image=nmo_images[1], uri=output_uri,
+                        labels=['full', 'previous']),
         # No NPO Deltas because the basic images have different image types.
 
         # NMO deltas.
         gspaths.Payload(tgt_image=self.basic_image,
                         src_image=nmo_images[0],
-                        uri=output_uri),
+                        uri=output_uri,
+                        labels=['delta', 'previous']),
         gspaths.Payload(tgt_image=self.premp_image,
                         src_image=nmo_images[1],
-                        uri=output_uri),
+                        uri=output_uri,
+                        labels=['delta', 'previous']),
 
         # FSI Deltas.
         gspaths.Payload(tgt_image=self.basic_image,
                         src_image=fsi1_images[0],
-                        uri=output_uri),
+                        uri=output_uri,
+                        labels=['delta', 'fsi']),
         gspaths.Payload(tgt_image=self.premp_image,
                         src_image=fsi1_images[1],
-                        uri=output_uri),
+                        uri=output_uri,
+                        labels=['delta', 'fsi']),
         gspaths.Payload(tgt_image=self.basic_image,
                         src_image=fsi2_images[0],
-                        uri=output_uri),
+                        uri=output_uri,
+                        labels=['delta', 'fsi']),
         gspaths.Payload(tgt_image=self.premp_image,
                         src_image=fsi2_images[1],
-                        uri=output_uri)]
-    expected = zip(expected, itertools.repeat(False))
+                        uri=output_uri,
+                        labels=['delta', 'fsi'])]
+
+    results = payload_manager.Get([])
     self.assertItemsEqual(sorted(results), sorted(expected))
diff --git a/lib/paygen/paygen_payload_lib.py b/lib/paygen/paygen_payload_lib.py
index 7273115..b35ab73 100644
--- a/lib/paygen/paygen_payload_lib.py
+++ b/lib/paygen/paygen_payload_lib.py
@@ -306,27 +306,29 @@
     delta_log = self._RunGeneratorCmd(cmd)
     self._StoreDeltaLog(delta_log)
 
-  def _GenPayloadHash(self):
-    """Generate a hash of payload and metadata.
+  def _GenerateHashes(self):
+    """Generate a payload hash and a metadata hash.
 
     Works from an unsigned update payload.
 
     Returns:
-      payload_hash as a string.
+      payload_hash as a string, metadata_hash as a string.
     """
-    logging.info('Calculating payload hashes on %s.', self.payload_file)
+    logging.info('Calculating hashes on %s.', self.payload_file)
 
     # How big will the signatures be.
     signature_sizes = [str(size) for size in self.PAYLOAD_SIGNATURE_SIZES_BYTES]
 
-    with tempfile.NamedTemporaryFile('rb') as payload_hash_file:
-      cmd = ['delta_generator',
-             '-in_file=' + self.payload_file,
-             '-out_hash_file=' + payload_hash_file.name,
-             '-signature_size=' + ':'.join(signature_sizes)]
+    with tempfile.NamedTemporaryFile('rb') as payload_hash_file, \
+         tempfile.NamedTemporaryFile('rb') as metadata_hash_file:
+      cmd = ['brillo_update_payload', 'hash',
+             '--unsigned_payload', self.payload_file,
+             '--payload_hash_file', payload_hash_file.name,
+             '--metadata_hash_file', metadata_hash_file.name,
+             '--signature_size', ':'.join(signature_sizes)]
 
       self._RunGeneratorCmd(cmd)
-      return payload_hash_file.read()
+      return payload_hash_file.read(), metadata_hash_file.read()
 
   def _MetadataSize(self, payload_file):
     """Discover the metadata size.
@@ -346,28 +348,6 @@
       payload.Init()
       return payload.data_offset
 
-  def _GenMetadataHash(self):
-    """Generate a hash of payload and metadata.
-
-    Works from an unsigned update payload.
-
-    Returns:
-      metadata_hash as a string.
-    """
-    logging.info('Calculating payload hashes on %s.', self.payload_file)
-
-    # How big will the signatures be.
-    signature_sizes = [str(size) for size in self.PAYLOAD_SIGNATURE_SIZES_BYTES]
-
-    with tempfile.NamedTemporaryFile('rb') as metadata_hash_file:
-      cmd = ['delta_generator',
-             '-in_file=' + self.payload_file,
-             '-out_metadata_hash_file=' + metadata_hash_file.name,
-             '-signature_size=' + ':'.join(signature_sizes)]
-
-      self._RunGeneratorCmd(cmd)
-      return metadata_hash_file.read()
-
   def _GenerateSignerResultsError(self, format_str, *args):
     """Helper for reporting errors with signer results."""
     msg = format_str % args
@@ -538,8 +518,7 @@
       List of payload signatures, List of metadata signatures.
     """
     # Create hashes to sign.
-    payload_hash = self._GenPayloadHash()
-    metadata_hash = self._GenMetadataHash()
+    payload_hash, metadata_hash = self._GenerateHashes()
 
     # Sign them.
     # pylint: disable=unpacking-non-sequence
diff --git a/lib/paygen/paygen_payload_lib_unittest.py b/lib/paygen/paygen_payload_lib_unittest.py
index 9526998..8c6d188 100644
--- a/lib/paygen/paygen_payload_lib_unittest.py
+++ b/lib/paygen/paygen_payload_lib_unittest.py
@@ -125,7 +125,7 @@
 
     if not au_generator_uri_override:
       au_generator_uri_override = gspaths.ChromeosReleases.GeneratorUri(
-          payload.tgt_image.channel, payload.tgt_image.board, '6351.0.0')
+          payload.tgt_image.channel, payload.tgt_image.board, '7587.0.0')
 
     return paygen_payload_lib._PaygenPayload(
         payload=payload,
@@ -403,8 +403,8 @@
     self.mox.ReplayAll()
     gen._GenerateUnsignedPayload()
 
-  def testGenPayloadHashes(self):
-    """Test _GenPayloadHash via mox."""
+  def testGenerateHashes(self):
+    """Test _GenerateHashes via mox."""
     gen = self._GetStdGenerator()
 
     # Stub out the required functions.
@@ -412,34 +412,16 @@
                              '_RunGeneratorCmd')
 
     # Record the expected function calls.
-    cmd = ['delta_generator',
-           '-in_file=' + gen.payload_file,
-           mox.IsA(str),
-           '-signature_size=256']
+    cmd = ['brillo_update_payload', 'hash',
+           '--unsigned_payload', gen.payload_file,
+           '--payload_hash_file', mox.IsA(str),
+           '--metadata_hash_file', mox.IsA(str),
+           '--signature_size', '256']
     gen._RunGeneratorCmd(cmd)
 
     # Run the test.
     self.mox.ReplayAll()
-    self.assertEqual(gen._GenPayloadHash(), '')
-
-  def testGenMetadataHashes(self):
-    """Test _GenPayloadHash via mox."""
-    gen = self._GetStdGenerator()
-
-    # Stub out the required functions.
-    self.mox.StubOutWithMock(paygen_payload_lib._PaygenPayload,
-                             '_RunGeneratorCmd')
-
-    # Record the expected function calls.
-    cmd = ['delta_generator',
-           '-in_file=' + gen.payload_file,
-           mox.IsA(str),
-           '-signature_size=256']
-    gen._RunGeneratorCmd(cmd)
-
-    # Run the test.
-    self.mox.ReplayAll()
-    self.assertEqual(gen._GenMetadataHash(), '')
+    self.assertEqual(gen._GenerateHashes(), ('', ''))
 
   def testSignHashes(self):
     """Test _SignHashes via mox."""
@@ -562,9 +544,7 @@
 
     # Set up stubs.
     self.mox.StubOutWithMock(paygen_payload_lib._PaygenPayload,
-                             '_GenPayloadHash')
-    self.mox.StubOutWithMock(paygen_payload_lib._PaygenPayload,
-                             '_GenMetadataHash')
+                             '_GenerateHashes')
     self.mox.StubOutWithMock(paygen_payload_lib._PaygenPayload,
                              '_SignHashes')
     self.mox.StubOutWithMock(paygen_payload_lib._PaygenPayload,
@@ -573,8 +553,7 @@
                              '_StoreMetadataSignatures')
 
     # Record expected calls.
-    gen._GenPayloadHash().AndReturn(payload_hash)
-    gen._GenMetadataHash().AndReturn(metadata_hash)
+    gen._GenerateHashes().AndReturn((payload_hash, metadata_hash))
     gen._SignHashes([payload_hash, metadata_hash]).AndReturn(
         (payload_sigs, metadata_sigs))
     gen._InsertPayloadSignatures(payload_sigs)
@@ -759,7 +738,7 @@
         cache=self.cache,
         work_dir=self.tempdir,
         au_generator_uri=gspaths.ChromeosReleases.GeneratorUri(
-            payload.tgt_image.channel, payload.tgt_image.board, '6351.0.0'),
+            payload.tgt_image.channel, payload.tgt_image.board, '7587.0.0'),
         sign=sign)
 
     self.assertTrue(os.path.exists(output_uri))
diff --git a/lib/portage_util.py b/lib/portage_util.py
index 41db764..c0fe77f 100644
--- a/lib/portage_util.py
+++ b/lib/portage_util.py
@@ -857,36 +857,6 @@
 
     return ebuild_projects
 
-  @classmethod
-  def UpdateCommitHashesForChanges(cls, changes, buildroot, manifest):
-    """Updates the commit hashes for the EBuilds uprevved in changes.
-
-    Args:
-      changes: Changes from Gerrit that are being pushed.
-      buildroot: Path to root of build directory.
-      manifest: git.ManifestCheckout object.
-    """
-    path_sha1s = {}
-    overlay_list = FindOverlays(constants.BOTH_OVERLAYS, buildroot=buildroot)
-    ebuild_paths = cls._GetEBuildPaths(buildroot, manifest, overlay_list,
-                                       changes)
-    for ebuild, paths in ebuild_paths.iteritems():
-      # Calculate any SHA1s that are not already in path_sha1s.
-      for path in set(paths).difference(path_sha1s):
-        path_sha1s[path] = cls._GetSHA1ForPath(manifest, path)
-
-      sha1s = [path_sha1s[path] for path in paths]
-      logging.info('Updating ebuild for package %s with commit hashes %r',
-                   ebuild.package, sha1s)
-      updates = dict(CROS_WORKON_COMMIT=cls.FormatBashArray(sha1s))
-      EBuild.UpdateEBuild(ebuild.ebuild_path, updates)
-
-    # Commit any changes to all overlays.
-    for overlay in overlay_list:
-      if EBuild.GitRepoHasChanges(overlay):
-        EBuild.CommitChange('Updating commit hashes in ebuilds '
-                            'to match remote repository.', overlay=overlay)
-
 
 class PortageDBException(Exception):
   """Generic PortageDB error."""
diff --git a/lib/portage_util_unittest.py b/lib/portage_util_unittest.py
index 216c7f7..3183303 100644
--- a/lib/portage_util_unittest.py
+++ b/lib/portage_util_unittest.py
@@ -7,7 +7,6 @@
 from __future__ import print_function
 
 import cStringIO
-import mock
 import os
 
 from chromite.cbuildbot import constants
@@ -454,33 +453,6 @@
     self.m_ebuild.CommitChange(mock_message, '.')
     m.assert_called_once_with('.', ['commit', '-a', '-m', 'Commitme'])
 
-  def testUpdateCommitHashesForChanges(self):
-    """Tests that we can update the commit hashes for changes correctly."""
-    build_root = 'fakebuildroot'
-    overlays = ['public_overlay']
-    changes = ['fake change']
-    paths = ['fake_path1', 'fake_path2']
-    sha1s = ['sha1', 'shaaaaaaaaaaaaaaaa2']
-    path_ebuilds = {self.m_ebuild: paths}
-
-    self.PatchObject(portage_util, 'FindOverlays', return_value=overlays)
-    self.PatchObject(portage_util.EBuild, '_GetEBuildPaths',
-                     return_value=path_ebuilds)
-    self.PatchObject(portage_util.EBuild, '_GetSHA1ForPath',
-                     side_effect=reversed(sha1s))
-    update_mock = self.PatchObject(portage_util.EBuild, 'UpdateEBuild')
-    self.PatchObject(portage_util.EBuild, 'GitRepoHasChanges',
-                     return_value=True)
-    commit_mock = self.PatchObject(portage_util.EBuild, 'CommitChange')
-
-    portage_util.EBuild.UpdateCommitHashesForChanges(changes, build_root,
-                                                     MANIFEST)
-
-    update_mock.assert_called_once_with(
-        self.m_ebuild.ebuild_path,
-        {'CROS_WORKON_COMMIT': '(%s)' % ' '.join('"%s"' % x for x in sha1s)})
-    commit_mock.assert_called_once_with(mock.ANY, overlay=overlays[0])
-
   def testGitRepoHasChanges(self):
     """Tests that GitRepoHasChanges works correctly."""
     git.RunGit(self.tempdir,
diff --git a/lib/project_sdk.py b/lib/project_sdk.py
deleted file mode 100644
index e547a5f..0000000
--- a/lib/project_sdk.py
+++ /dev/null
@@ -1,160 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Common utilities for working with Project SDK."""
-
-from __future__ import print_function
-
-import os
-import re
-import stat
-
-from chromite.cbuildbot import constants
-from chromite.lib import cros_build_lib
-from chromite.lib import cros_logging as logging
-from chromite.lib import osutils
-from chromite.lib import workspace_lib
-
-
-def FindRepoRoot(sdk_dir=None):
-  """Locate the SDK root directly by looking for .repo dir.
-
-  This is very similar to constants.SOURCE_ROOT, except that it can operate
-  against repo checkouts outside our current code base.
-
-  CAUTION! Using SDKs from directories other than the default is likely to
-  break assumptions that our tools are built upon.  As a rule of thumb, do not
-  expose this argument externally unless you know what you're doing.
-
-  Args:
-    sdk_dir: Path of the SDK, or any dir inside it. None defaults to
-      constants.SOURCE_ROOT.
-
-  Returns:
-    Root dir of SDK, or None.
-  """
-  if sdk_dir is None:
-    return constants.SOURCE_ROOT
-
-  # Make sure we're looking at an actual directory.
-  if not os.path.isdir(sdk_dir):
-    return None
-
-  # Find the .repo directory and return the path leading up to it, if found.
-  repo_dir = osutils.FindInPathParents('.repo', os.path.abspath(sdk_dir),
-                                       test_func=os.path.isdir)
-  return os.path.dirname(repo_dir) if repo_dir else None
-
-
-def VersionFile(sdk_dir):
-  return os.path.join(sdk_dir, 'SDK_VERSION')
-
-
-def FindVersion(sdk_dir=None):
-  """Find the version of a given SDK.
-
-  If the SDK was fetched by any means other than "brillo sdk" then it will
-  always appear to be 'non-official', even if an official manifest was used.
-
-  Args:
-    sdk_dir: path to the SDK, or any of its sub directories.
-
-  Returns:
-    The version of your SDK as a string. '6500.0.0'
-    None if the directory doesn't appear to be an SDK.
-  """
-  sdk_root = FindRepoRoot(sdk_dir)
-  if sdk_root is None:
-    return None
-
-  v_file = VersionFile(sdk_root)
-  return osutils.ReadFile(v_file) if os.path.exists(v_file) else None
-
-
-def _GetExecutableVersion(cmd, version_arg='--version'):
-  """Gets an executable version string using |version_flag|.
-
-  Args:
-    cmd: Executable to check (for example, '/bin/bash').
-    version_arg: Argument to get |cmd| to print its version.
-
-  Returns:
-    Output string or None if the program doesn't exist or gave a
-    non-zero exit code.
-  """
-  try:
-    return cros_build_lib.RunCommand(
-        [cmd, version_arg], print_cmd=False, capture_output=True).output
-  except cros_build_lib.RunCommandError:
-    return None
-
-
-def VerifyEnvironment(workspace_path=None):
-  """Verify the environment we are installed to.
-
-  Disk permissions are only verified if a workspace path is provided.
-
-  Args:
-    workspace_path: Root directory of the workspace or None.
-
-  Returns:
-    boolean: True if the environment looks friendly.
-  """
-  result = True
-
-  # Verify Python:
-  #   We assume the python environment is acceptable, because we got here.
-  #   However, we can add imports here to check for any required external
-  #   packages.
-
-  # Verify executables that just need to exist.
-  for cmd in ('/bin/bash', 'curl'):
-    if _GetExecutableVersion(cmd) is None:
-      logging.error('%s is required to use the SDK.', cmd)
-      result = False
-
-  # Verify Git version.
-  git_requirement_message = 'git 1.8 or greater is required to use the SDK.'
-  git_version = _GetExecutableVersion('git')
-  if git_version is None:
-    logging.error(git_requirement_message)
-    result = False
-
-  # Example version string: 'git version 2.2.0.rc0.207.ga3a616c'.
-  m = re.match(r'git version (\d+)\.(\d+)', git_version)
-  if not m:
-    logging.error(git_requirement_message)
-    logging.error("git version not recognized from: '%s'.", git_version)
-    result = False
-  else:
-    gv_int_list = [int(d) for d in m.groups()] # Something like [2, 3]
-    if gv_int_list < [1, 8]:
-      logging.error(git_requirement_message)
-      logging.error("Current version: '%s'.", git_version)
-      result = False
-
-  # If a workspace path is provided, validate chroot requirements.
-  if workspace_path:
-    chroot_dir = workspace_lib.ChrootPath(workspace_path)
-
-    # Create a file with the suid bit set.
-    suid_file = os.path.join(chroot_dir, 'suid_test')
-    try:
-      # Create a file with the SUID set for the owner.
-      osutils.Touch(suid_file, makedirs=True, mode=stat.S_ISUID)
-
-      # See if the SUID bit will be respected, or ignored.
-      st = os.statvfs(suid_file)
-
-      # The os.ST_NOSUID constant wasn't added until python-3.2.
-      if st.f_flag & 0x2:
-        logging.error(
-            'Your current chroot directory (%s) does not support the SUID bit,'
-            ' which is required. You can move the chroot to a new location'
-            ' using "brillo chroot --move <new_dir>"', chroot_dir)
-        result = False
-    finally:
-      osutils.SafeUnlink(suid_file)
-
-  return result
diff --git a/lib/project_sdk_unittest b/lib/project_sdk_unittest
deleted file mode 120000
index 72196ce..0000000
--- a/lib/project_sdk_unittest
+++ /dev/null
@@ -1 +0,0 @@
-../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/project_sdk_unittest.py b/lib/project_sdk_unittest.py
deleted file mode 100644
index 7859c02..0000000
--- a/lib/project_sdk_unittest.py
+++ /dev/null
@@ -1,168 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Tests for the project_sdk library."""
-
-from __future__ import print_function
-
-import os
-
-from chromite.cbuildbot import constants
-from chromite.lib import cros_build_lib_unittest
-from chromite.lib import cros_test_lib
-from chromite.lib import osutils
-from chromite.lib import project_sdk
-
-# pylint: disable=protected-access
-
-class ProjectSdkTest(cros_test_lib.TempDirTestCase):
-  """Unittest for project_sdk.py"""
-
-  def setUp(self):
-    self.version = '1.2.3'
-
-    # Define assorted paths to test against.
-    self.bogus_dir = os.path.join(self.tempdir, 'bogus')
-    self.repo_dir = os.path.join(self.tempdir, 'repo')
-    self.nested_dir = os.path.join(self.repo_dir, 'foo', 'bar')
-
-    # Create fake repo with sub-dirs.
-    osutils.SafeMakedirs(os.path.join(self.repo_dir, '.repo'))
-    osutils.SafeMakedirs(self.nested_dir)
-
-    version_file = project_sdk.VersionFile(self.repo_dir)
-    osutils.WriteFile(version_file, self.version)
-
-  def testFindRepoRootCurrentRepo(self):
-    """Test FindRepoRoot with default of CWD."""
-    self.assertEqual(constants.SOURCE_ROOT, project_sdk.FindRepoRoot())
-
-  def testFindRepoRootSpecifiedBogus(self):
-    """Test FindRepoRoot with non-existent directory outside the repo."""
-    self.assertIsNone(project_sdk.FindRepoRoot(self.bogus_dir))
-
-  def testFindRepoRootSpecifiedRoot(self):
-    """Test FindRepoRoot with top level of repo tree."""
-    self.assertEqual(self.repo_dir, project_sdk.FindRepoRoot(self.repo_dir))
-
-  def testFindRepoRootSpecifiedNested(self):
-    """Test FindRepoRoot with nested inside repo tree."""
-    self.assertEqual(self.repo_dir, project_sdk.FindRepoRoot(self.nested_dir))
-
-  def testFindRepoRootSpecifiedNonexistent(self):
-    """Test FindRepoRoot refuses to scan a nonexistent path."""
-    self.assertIsNone(
-        project_sdk.FindRepoRoot(os.path.join(self.nested_dir, 'not_there')))
-
-  def testVersionFile(self):
-    self.assertEqual('/foo/SDK_VERSION', project_sdk.VersionFile('/foo'))
-
-  def testFindVersionDefault(self):
-    """Test FindVersion with default of CWD."""
-    # Expected results are undefined, just ensure we don't crash.
-    project_sdk.FindVersion()
-
-  def testFindVersionBogus(self):
-    """Test FindVersion with non-existent directory outside the repo."""
-    self.assertIsNone(project_sdk.FindVersion(self.bogus_dir))
-
-  def testFindVersionSpecifiedRoot(self):
-    """Test FindVersion with top level of repo tree."""
-    self.assertEqual(self.version, project_sdk.FindVersion(self.repo_dir))
-
-  def testFindVersionSpecifiedNested(self):
-    """Test FindVersion with nested inside repo tree."""
-    self.assertEqual(self.version, project_sdk.FindVersion(self.nested_dir))
-
-
-class ProjectSdkVerifyFake(cros_test_lib.MockTempDirTestCase):
-  """Test VerifyEnvironment with mocks."""
-
-  def setUp(self):
-    self.rc_mock = self.StartPatcher(cros_build_lib_unittest.RunCommandMock())
-
-  def MockEnvironment(self, bash=True, git='2.2.0', curl=True, suid=True):
-    """Mocks an environment for the specified checks.
-
-    When adding a new environment check, add it to this function so that
-    each test can isolate just the check it wants to test.
-
-    Args:
-      bash: True to mock a valid bash environment.
-      git: If set, git version to mock.
-      curl: True to mock a valid curl environment.
-      suid: True to mock a valid suid environment.
-    """
-    if bash:
-      self.rc_mock.AddCmdResult(['/bin/bash', '--version'])
-    if git:
-      self.rc_mock.AddCmdResult(['git', '--version'],
-                                output='git version %s' % git)
-    if curl:
-      self.rc_mock.AddCmdResult(['curl', '--version'])
-    if suid:
-      self.PatchObject(os, 'statvfs', autospec=True).return_value.f_flag = 0x0
-
-  def testMockEnvironment(self):
-    """Test that MockEnvironment() by itself sets up a valid env."""
-    self.MockEnvironment()
-
-    self.assertTrue(project_sdk.VerifyEnvironment(self.tempdir))
-
-  def testGTrusty(self):
-    """Test with mock of 'gTrusty' distribution."""
-    self.MockEnvironment(git='2.2.0.rc0.207.ga3a616c')
-
-    self.assertTrue(project_sdk.VerifyEnvironment(self.tempdir))
-
-  def testUbuntu14(self):
-    """Test with mock of 'Ubuntu LTS 14' distribution."""
-    self.MockEnvironment(git='2.1.0')
-
-    self.assertTrue(project_sdk.VerifyEnvironment(self.tempdir))
-
-  def testGitNewEnough(self):
-    """Test with mock of git 1.8."""
-    self.MockEnvironment(git='1.8.3.1')
-
-    self.assertTrue(project_sdk.VerifyEnvironment(self.tempdir))
-
-  def testFailNoBash(self):
-    """Test with mock of no bash present."""
-    self.MockEnvironment(bash=False)
-    self.rc_mock.AddCmdResult(['/bin/bash', '--version'], returncode=127)
-
-    self.assertFalse(project_sdk.VerifyEnvironment(self.tempdir))
-
-  def testFailGitTooOld(self):
-    """Test with mock of git too old to use."""
-    self.MockEnvironment(git='1.7.10.4')
-
-    self.assertFalse(project_sdk.VerifyEnvironment(self.tempdir))
-
-  def testFailNoCurl(self):
-    """Test with mock of no curl present."""
-    self.MockEnvironment(curl=False)
-    self.rc_mock.AddCmdResult(['curl', '--version'], returncode=127)
-
-    self.assertFalse(project_sdk.VerifyEnvironment(self.tempdir))
-
-  def testFailSuid(self):
-    """Test with SUID is disabled."""
-    self.MockEnvironment(suid=False)
-    # The os.ST_NOSUID constant wasn't added until python-3.2.
-    self.PatchObject(os, 'statvfs', autospec=True).return_value.f_flag = 0x2
-
-    self.assertFalse(project_sdk.VerifyEnvironment(self.tempdir))
-
-class ProjectSdkVerifyReal(cros_test_lib.TempDirTestCase):
-  """Test VerifyEnvironment for real."""
-
-  def testVerifyEnvironment(self):
-    """Test, assuming production environment is valid."""
-    self.assertTrue(project_sdk.VerifyEnvironment(self.tempdir))
-
-  def testGetExecutableVersionNonExistent(self):
-    """Tests _GetExecutableVersion() when the executable doesn't exist."""
-    self.assertIsNone(project_sdk._GetExecutableVersion('/not/a/real/program'))
diff --git a/lib/sysroot_lib.py b/lib/sysroot_lib.py
index b04d4bf..74fe4e9 100644
--- a/lib/sysroot_lib.py
+++ b/lib/sysroot_lib.py
@@ -316,31 +316,6 @@
     return self._GenerateConfig(toolchains, board_overlays, portdir_overlays,
                                 header, BOARD_USE=board)
 
-  def GenerateBrickConfig(self, bricks, bsp=None):
-    """Generates the configuration for a given brick stack and bsp.
-
-    Args:
-      bricks: The brick stack, expanded, excluding the bsp.
-      bsp: BSP to use.
-    """
-    brick_list = bricks
-    if bsp:
-      brick_list = bsp.BrickStack() + brick_list
-
-    board_overlays = [b.OverlayDir() for b in brick_list]
-    portdir_overlays = [_CHROMIUMOS_OVERLAY, _ECLASS_OVERLAY] + board_overlays
-
-    # If the bsp is not set use the highest priority brick. This is meant to
-    # preserve support for building with --brick.
-    # TODO(bsimonnet): remove this once we remove support for --brick
-    # (brbug.com/916).
-    bsp = bsp or bricks[-1]
-    toolchains = toolchain.GetToolchainsForBrick(bsp.brick_locator)
-
-    header = '# Autogenerated by chromite.lib.sysroot_lib.'
-    return self._GenerateConfig(toolchains, board_overlays, portdir_overlays,
-                                header)
-
   def WriteConfig(self, config):
     """Writes the configuration.
 
diff --git a/lib/toolchain.py b/lib/toolchain.py
index cedc9a0..d20ed41 100644
--- a/lib/toolchain.py
+++ b/lib/toolchain.py
@@ -9,7 +9,6 @@
 import cStringIO
 
 from chromite.cbuildbot import constants
-from chromite.lib import brick_lib
 from chromite.lib import cros_build_lib
 from chromite.lib import gs
 from chromite.lib import portage_util
@@ -65,20 +64,6 @@
   return targets
 
 
-def GetToolchainsForBrick(brick_locator):
-  """Get a dictionary mapping toolchain targets to their options for a brick.
-
-  Args:
-    brick_locator: locator for the brick.
-
-  Returns:
-    The list of toolchain tuples for the given brick.
-  """
-  toolchains = toolchain_list.ToolchainList(
-      brick=brick_lib.Brick(brick_locator))
-  return toolchains.GetMergedToolchainSettings()
-
-
 def FilterToolchains(targets, key, value):
   """Filter out targets based on their attributes.
 
diff --git a/lib/toolchain_list.py b/lib/toolchain_list.py
index 432436b..261e36e 100644
--- a/lib/toolchain_list.py
+++ b/lib/toolchain_list.py
@@ -22,7 +22,7 @@
 
 
 class NoDefaultToolchainDefinedError(Exception):
-  """Brillo brick stacks are required to define a default toolchain."""
+  """Overlays are required to define a default toolchain."""
 
 
 class MismatchedToolchainConfigsError(Exception):
@@ -32,28 +32,20 @@
 class ToolchainList(object):
   """Represents a list of toolchains."""
 
-  def __init__(self, brick=None, overlays=None):
+  def __init__(self, overlays):
     """Construct an instance.
 
     Args:
-      brick: brick_lib.Brick object.  We'll add the toolchains used by the brick
-          and its dependencies to |self|.
       overlays: list of overlay directories to add toolchains from.
     """
-    if brick is None and overlays is None:
-      raise ValueError('Must specify either brick or overlays.')
-    if brick is not None and overlays is not None:
-      raise ValueError('Must specify one of brick or overlays.')
+    if overlays is None:
+      raise ValueError('Must specify overlays.')
 
     self._toolchains = []
     self._require_explicit_default_toolchain = True
-    if brick:
-      for each_brick in brick.BrickStack():
-        self._AddToolchainsFromBrick(each_brick)
-    else:
-      self._require_explicit_default_toolchain = False
-      for overlay_path in overlays:
-        self._AddToolchainsFromOverlayDir(overlay_path)
+    self._require_explicit_default_toolchain = False
+    for overlay_path in overlays:
+      self._AddToolchainsFromOverlayDir(overlay_path)
 
   def _AddToolchainsFromOverlayDir(self, overlay_dir):
     """Add toolchains to |self| from the given overlay.
@@ -79,15 +71,6 @@
       settings = json.loads(line_pieces[1]) if len(line_pieces) > 1 else {}
       self._AddToolchain(target, setting_overrides=settings)
 
-  def _AddToolchainsFromBrick(self, brick):
-    """Add toolchains to |self| defined by the given brick.
-
-    Args:
-      brick: brick_lib.Brick object.
-    """
-    for target, settings in brick.config.get('toolchains', {}):
-      self._AddToolchain(target, setting_overrides=settings)
-
   def _AddToolchain(self, target, setting_overrides=None):
     """Add a toolchain to |self|.
 
diff --git a/lib/toolchain_unittest.py b/lib/toolchain_unittest.py
index 1e44214..d11c983 100644
--- a/lib/toolchain_unittest.py
+++ b/lib/toolchain_unittest.py
@@ -9,13 +9,10 @@
 import mock
 import os
 
-from chromite.lib import brick_lib
 from chromite.lib import cros_build_lib_unittest
 from chromite.lib import cros_test_lib
 from chromite.lib import osutils
 from chromite.lib import toolchain
-from chromite.lib import toolchain_list
-from chromite.lib import workspace_lib
 
 
 BASE_TOOLCHAIN_CONF = """# The root of all evil is money, err, this config.
@@ -32,24 +29,6 @@
 bonus-toolchain {"stable": true}
 """
 
-MODERN_BSP_BRICK_CONFIG = {
-    'name': 'bsp-brick',
-    'toolchains': [('base-target-name', {'default': True}),
-                   ('bonus-toolchain', {'a setting': 'bonus value'})
-                  ],
-    'dependencies': ['//custom-firmware-brick'],
-}
-
-MODERN_FIRMWARE_BRICK_CONFIG = {
-    'name': 'custom-firmware-brick',
-    'toolchains': [('bonus-toolchain', {'stable': True}),
-                   ('extra-toolchain', {})],
-}
-
-TYPICAL_BRICK_WITHOUT_TOOLCHAINS = {
-    'name': 'custom-firmware-brick',
-}
-
 EXPECTED_TOOLCHAINS = {
     'bonus-toolchain': {
         'sdk': True,
@@ -66,13 +45,6 @@
 class ToolchainTest(cros_test_lib.MockTempDirTestCase):
   """Tests for lib.toolchain."""
 
-  def _MakeBrick(self, config):
-    return brick_lib.Brick(os.path.join(self.tempdir, config['name']),
-                           initial_config=config)
-
-  def setUp(self):
-    self.PatchObject(workspace_lib, 'WorkspacePath', return_value=self.tempdir)
-
   def testArchForToolchain(self):
     """Tests that we correctly parse crossdev's output."""
     rc_mock = cros_build_lib_unittest.RunCommandMock()
@@ -104,46 +76,3 @@
     find_overlays_mock.return_value = overlays
     actual_targets = toolchain.GetToolchainsForBoard('board_value')
     self.assertEqual(EXPECTED_TOOLCHAINS, actual_targets)
-
-  def testReadsBrickToolchains(self):
-    """Tests that we can read the toolchain for a brick stack."""
-    # Creates the brick in a subdirectory of tempdir so that we can create other
-    # bricks without interfering with it.
-    self._MakeBrick(MODERN_FIRMWARE_BRICK_CONFIG)
-    top_brick = self._MakeBrick(MODERN_BSP_BRICK_CONFIG)
-    self.assertEqual(EXPECTED_TOOLCHAINS,
-                     toolchain.GetToolchainsForBrick(top_brick.brick_locator))
-
-  def testShouldDetectMissingDefaultsInBricks(self):
-    """Tests that we check for a default toolchain in bricks."""
-    brick = self._MakeBrick(
-        {'name': 'brick-name', 'toolchains': [('base-toolchain', {})]})
-    self.assertRaises(toolchain_list.NoDefaultToolchainDefinedError,
-                      toolchain.GetToolchainsForBrick,
-                      brick.brick_locator)
-
-  def testShouldDetectConflictingOverrides(self):
-    """Tests that we disallow toolchains with obvious conflicting settings."""
-    conflicting_brick = self._MakeBrick(
-        {'name': 'conflicting-brick',
-         'toolchains': [
-             ('base-toolchain', {'default': True,
-                                 'setting': 'conflicting value'}),
-         ],
-        })
-    brick = self._MakeBrick(
-        {'name': 'bsp-brick',
-         'toolchains': [
-             ('base-toolchain', {'default': True,
-                                 'setting': 'bsp value'}),
-         ],
-         'dependencies': [conflicting_brick.brick_locator],
-        })
-    self.assertRaises(toolchain_list.MismatchedToolchainConfigsError,
-                      toolchain.GetToolchainsForBrick,
-                      brick.brick_locator)
-
-  def testToleratesBricksWithoutToolchains(self):
-    """Tests that we correctly handle bricks that are toolchain agnostic."""
-    simple_brick = self._MakeBrick(TYPICAL_BRICK_WITHOUT_TOOLCHAINS)
-    toolchain.GetToolchainsForBrick(simple_brick.brick_locator)
diff --git a/lib/workspace_lib.py b/lib/workspace_lib.py
deleted file mode 100644
index bdcd98c..0000000
--- a/lib/workspace_lib.py
+++ /dev/null
@@ -1,329 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Utilities for discovering the directories associated with workspaces.
-
-Workspaces have a variety of important concepts:
-
-* The bootstrap repository. BOOTSTRAP/chromite/bootstrap is expected to be in
-the user's path. Most commands run from here redirect to the active SDK.
-
-* The workspace directory. This directory (identified by presence of
-WORKSPACE_CONFIG), contains code, and is associated with exactly one SDK
-instance. It is normally discovered based on CWD.
-
-* The SDK root. This directory contains a specific SDK version, and is stored in
-BOOTSTRAP/sdk_checkouts/<version>.
-
-This library contains helper methods for finding all of the relevant directories
-here.
-"""
-
-from __future__ import print_function
-
-import json
-import os
-
-from chromite.cbuildbot import constants
-from chromite.lib import cros_build_lib
-from chromite.lib import osutils
-
-MAIN_CHROOT_DIR_IN_VM = '/chroots'
-
-# The presence of this file signifies the root of a workspace.
-WORKSPACE_CONFIG = 'workspace-config.json'
-WORKSPACE_LOCAL_CONFIG = '.local.json'
-WORKSPACE_CHROOT_DIR = '.chroot'
-WORKSPACE_IMAGES_DIR = 'build/images'
-WORKSPACE_LOGS_DIR = 'build/logs'
-
-# Prefixes used by locators.
-_BOARD_LOCATOR_PREFIX = 'board:'
-_WORKSPACE_LOCATOR_PREFIX = '//'
-
-
-class LocatorNotResolved(Exception):
-  """Given locator could not be resolved."""
-
-
-class ConfigFileError(Exception):
-  """Configuration file writing or reading failed."""
-
-
-def WorkspacePath(workspace_reference_dir=None):
-  """Returns the path to the current workspace.
-
-  This method works both inside and outside the chroot, though results will
-  be different.
-
-  Args:
-    workspace_reference_dir: Any directory inside the workspace. If None,
-      will use CWD (outside chroot), or bind mount location (inside chroot).
-      You should normally use the default.
-
-  Returns:
-    Path to root directory of the workspace (if valid), or None.
-  """
-  if workspace_reference_dir is None:
-    if cros_build_lib.IsInsideChroot():
-      workspace_reference_dir = constants.CHROOT_WORKSPACE_ROOT
-    else:
-      workspace_reference_dir = os.getcwd()
-
-  workspace_config = osutils.FindInPathParents(
-      WORKSPACE_CONFIG,
-      os.path.abspath(workspace_reference_dir))
-
-  return os.path.dirname(workspace_config) if workspace_config else None
-
-
-def ChrootPath(workspace_path):
-  """Returns the path to the chroot associated with the given workspace.
-
-  Each workspace has its own associated chroot. This method returns the chroot
-  path set in the workspace config if present, or else the default location,
-  which varies depending on whether or not we run in a VM.
-
-  Args:
-    workspace_path: Root directory of the workspace (WorkspacePath()).
-
-  Returns:
-    Path to where the chroot is, or where it should be created.
-  """
-  config_value = GetChrootDir(workspace_path)
-  if config_value:
-    # If the config value is a relative path, we base it in the workspace path.
-    # Otherwise, it is an absolute path and will be returned as is.
-    return os.path.join(workspace_path, config_value)
-
-  # The default for a VM.
-  if osutils.IsInsideVm():
-    return os.path.join(MAIN_CHROOT_DIR_IN_VM, os.path.basename(workspace_path))
-
-  # The default for all other cases.
-  return os.path.join(workspace_path, WORKSPACE_CHROOT_DIR)
-
-
-def SetChrootDir(workspace_path, chroot_dir):
-  """Set which chroot directory a workspace uses.
-
-  This value will overwrite the default value, if set. This is normally only
-  used if the user overwrites the default value. This method is NOT atomic.
-
-  Args:
-    workspace_path: Root directory of the workspace (WorkspacePath()).
-    chroot_dir: Directory in which this workspaces chroot should be created.
-  """
-  # Read the config, update its chroot_dir, and write it.
-  config = _ReadLocalConfig(workspace_path)
-  config['chroot_dir'] = chroot_dir
-  _WriteLocalConfig(workspace_path, config)
-
-
-def GetChrootDir(workspace_path):
-  """Get override of chroot directory for a workspace.
-
-  You should normally call ChrootPath so that the default value will be
-  found if no explicit value has been set.
-
-  Args:
-    workspace_path: Root directory of the workspace (WorkspacePath()).
-
-  Returns:
-    version string or None.
-  """
-  # Config should always return a dictionary.
-  config = _ReadLocalConfig(workspace_path)
-
-  # If version is present, use it, else return None.
-  return config.get('chroot_dir')
-
-
-def GetActiveSdkVersion(workspace_path):
-  """Find which SDK version a workspace is associated with.
-
-  This SDK may or may not exist in the bootstrap cache. There may be no
-  SDK version associated with a workspace.
-
-  Args:
-    workspace_path: Root directory of the workspace (WorkspacePath()).
-
-  Returns:
-    version string or None.
-  """
-  # Config should always return a dictionary.
-  config = _ReadLocalConfig(workspace_path)
-
-  # If version is present, use it, else return None.
-  return config.get('version')
-
-
-def SetActiveSdkVersion(workspace_path, version):
-  """Set which SDK version a workspace is associated with.
-
-  This method is NOT atomic.
-
-  Args:
-    workspace_path: Root directory of the workspace (WorkspacePath()).
-    version: Version string of the SDK. (Eg. 1.2.3)
-  """
-  # Read the config, update its version, and write it.
-  config = _ReadLocalConfig(workspace_path)
-  config['version'] = version
-  _WriteLocalConfig(workspace_path, config)
-
-
-def _ReadLocalConfig(workspace_path):
-  """Read a local config for a workspace.
-
-  Args:
-    workspace_path: Root directory of the workspace (WorkspacePath()).
-
-  Returns:
-    Local workspace config as a Python dictionary.
-  """
-  try:
-    return ReadConfigFile(os.path.join(workspace_path, WORKSPACE_LOCAL_CONFIG))
-  except IOError:
-    # If the file doesn't exist, it's an empty dictionary.
-    return {}
-
-
-def _WriteLocalConfig(workspace_path, config):
-  """Save out a new local config for a workspace.
-
-  Args:
-    workspace_path: Root directory of the workspace (WorkspacePath()).
-    config: New local workspace config contents as a Python dictionary.
-  """
-  WriteConfigFile(os.path.join(workspace_path, WORKSPACE_LOCAL_CONFIG), config)
-
-
-def IsLocator(name):
-  """Returns True if name is a specific locator."""
-  if not name:
-    raise ValueError('Locator is empty')
-  return (name.startswith(_WORKSPACE_LOCATOR_PREFIX)
-          or name.startswith(_BOARD_LOCATOR_PREFIX))
-
-
-def LocatorToPath(locator):
-  """Returns the absolute path for this locator.
-
-  Args:
-    locator: a locator.
-
-  Returns:
-    The absolute path defined by this locator.
-
-  Raises:
-    ValueError: If |locator| is invalid.
-    LocatorNotResolved: If |locator| is valid but could not be resolved.
-  """
-  if locator.startswith(_WORKSPACE_LOCATOR_PREFIX):
-    workspace_path = WorkspacePath()
-    if workspace_path is None:
-      raise LocatorNotResolved(
-          'Workspace not found while trying to resolve %s' % locator)
-    return os.path.join(workspace_path,
-                        locator[len(_WORKSPACE_LOCATOR_PREFIX):])
-
-  if locator.startswith(_BOARD_LOCATOR_PREFIX):
-    return os.path.join(constants.SOURCE_ROOT, 'src', 'overlays',
-                        'overlay-%s' % locator[len(_BOARD_LOCATOR_PREFIX):])
-
-  raise ValueError('Invalid locator %s' % locator)
-
-
-def PathToLocator(path):
-  """Converts a path to a locator.
-
-  This does not raise error if the path does not map to a locator. Some valid
-  (legacy) brick path do not map to any locator: chromiumos-overlay,
-  private board overlays, etc...
-
-  Args:
-    path: absolute or relative to CWD path to a workspace object or board
-      overlay.
-
-  Returns:
-    The locator for this path if it exists, None otherwise.
-  """
-  workspace_path = WorkspacePath()
-  path = os.path.abspath(path)
-
-  if workspace_path is None:
-    return None
-
-  # If path is in the current workspace, return the relative path prefixed with
-  # the workspace prefix.
-  if os.path.commonprefix([path, workspace_path]) == workspace_path:
-    return _WORKSPACE_LOCATOR_PREFIX + os.path.relpath(path, workspace_path)
-
-  # If path is in the src directory of the checkout, this is a board overlay.
-  # Encode it as board locator.
-  src_path = os.path.join(constants.SOURCE_ROOT, 'src')
-  if os.path.commonprefix([path, src_path]) == src_path:
-    parts = os.path.split(os.path.relpath(path, src_path))
-    if parts[0] == 'overlays':
-      board_name = '-'.join(parts[1].split('-')[1:])
-      return _BOARD_LOCATOR_PREFIX + board_name
-
-  return None
-
-
-def LocatorToFriendlyName(locator):
-  """Returns a friendly name for a given locator.
-
-  Args:
-    locator: a locator.
-  """
-  if IsLocator(locator) and locator.startswith(_WORKSPACE_LOCATOR_PREFIX):
-    return locator[len(_WORKSPACE_LOCATOR_PREFIX):].replace('/', '.')
-
-  raise ValueError('Not a valid workspace locator: %s' % locator)
-
-
-def WriteConfigFile(path, config):
-  """Writes |config| to a file at |path|.
-
-  Configuration files in a workspace should all use the same format
-  whenever possible. Currently it's JSON, but centralizing config
-  read/write makes it easier to change when needed.
-
-  Args:
-    path: path to write.
-    config: configuration dictionary to write.
-
-  Raises:
-    ConfigFileError: |config| cannot be written as JSON.
-  """
-  # TODO(dpursell): Add support for comments in config files.
-  try:
-    osutils.WriteFile(
-        path,
-        json.dumps(config, sort_keys=True, indent=4, separators=(',', ': ')),
-        makedirs=True)
-  except TypeError as e:
-    raise ConfigFileError('Writing config file %s failed: %s', path, e)
-
-
-def ReadConfigFile(path):
-  """Reads a configuration file at |path|.
-
-  For use with WriteConfigFile().
-
-  Args:
-    path: file path.
-
-  Returns:
-    Result of parsing the JSON file.
-
-  Raises:
-    ConfigFileError: JSON parsing failed.
-  """
-  try:
-    return json.loads(osutils.ReadFile(path))
-  except ValueError as e:
-    raise ConfigFileError('%s is not in valid JSON format: %s' % (path, e))
diff --git a/lib/workspace_lib_unittest b/lib/workspace_lib_unittest
deleted file mode 120000
index 72196ce..0000000
--- a/lib/workspace_lib_unittest
+++ /dev/null
@@ -1 +0,0 @@
-../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/workspace_lib_unittest.py b/lib/workspace_lib_unittest.py
deleted file mode 100644
index b5378f9..0000000
--- a/lib/workspace_lib_unittest.py
+++ /dev/null
@@ -1,229 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Tests for the workspace_lib library."""
-
-from __future__ import print_function
-
-import mock
-import os
-
-from chromite.cbuildbot import constants
-from chromite.lib import cros_build_lib
-from chromite.lib import cros_test_lib
-from chromite.lib import osutils
-from chromite.lib import workspace_lib
-
-# pylint: disable=protected-access
-
-class WorkspaceLibTest(cros_test_lib.TempDirTestCase):
-  """Unittests for workspace_lib.py"""
-
-  def setUp(self):
-    # Define assorted paths to test against.
-    self.bogus_dir = os.path.join(self.tempdir, 'bogus')
-
-    self.workspace_dir = os.path.join(self.tempdir, 'workspace')
-    self.workspace_config = os.path.join(self.workspace_dir,
-                                         workspace_lib.WORKSPACE_CONFIG)
-    self.workspace_nested = os.path.join(self.workspace_dir, 'foo', 'bar')
-    # Create workspace directories and files.
-    osutils.Touch(self.workspace_config, makedirs=True)
-    osutils.SafeMakedirs(self.workspace_nested)
-
-  @mock.patch('os.getcwd')
-  @mock.patch.object(cros_build_lib, 'IsInsideChroot', return_value=False)
-  def testWorkspacePathOutsideChroot(self, _mock_inside, mock_cwd):
-    # Set default to a dir outside the workspace.
-    mock_cwd.return_value = self.bogus_dir
-
-    # Inside the workspace, specified dir.
-    self.assertEqual(self.workspace_dir,
-                     workspace_lib.WorkspacePath(self.workspace_dir))
-    self.assertEqual(self.workspace_dir,
-                     workspace_lib.WorkspacePath(self.workspace_nested))
-
-    # Outside the workspace, specified dir.
-    self.assertEqual(None, workspace_lib.WorkspacePath(self.tempdir))
-    self.assertEqual(None, workspace_lib.WorkspacePath(self.bogus_dir))
-
-    # Inside the workspace, default dir.
-    mock_cwd.return_value = self.workspace_dir
-    self.assertEqual(self.workspace_dir, workspace_lib.WorkspacePath())
-
-    mock_cwd.return_value = self.workspace_nested
-    self.assertEqual(self.workspace_dir, workspace_lib.WorkspacePath())
-
-    # Outside the workspace, default dir.
-    mock_cwd.return_value = self.tempdir
-    self.assertEqual(None, workspace_lib.WorkspacePath())
-
-    mock_cwd.return_value = self.bogus_dir
-    self.assertEqual(None, workspace_lib.WorkspacePath())
-
-  @mock.patch.object(cros_build_lib, 'IsInsideChroot', return_value=True)
-  def testWorkspacePathInsideChroot(self, _mock_inside):
-    orig_root = constants.CHROOT_WORKSPACE_ROOT
-    try:
-      # Set default to a dir outside the workspace.
-      constants.CHROOT_WORKSPACE_ROOT = self.bogus_dir
-
-      # Inside the workspace, specified dir.
-      self.assertEqual(self.workspace_dir,
-                       workspace_lib.WorkspacePath(self.workspace_dir))
-      self.assertEqual(self.workspace_dir,
-                       workspace_lib.WorkspacePath(self.workspace_nested))
-
-      # Outside the workspace, specified dir.
-      self.assertEqual(None, workspace_lib.WorkspacePath(self.tempdir))
-      self.assertEqual(None, workspace_lib.WorkspacePath(self.bogus_dir))
-
-      # Inside the workspace, default dir.
-      constants.CHROOT_WORKSPACE_ROOT = self.workspace_dir
-      self.assertEqual(self.workspace_dir, workspace_lib.WorkspacePath())
-
-      constants.CHROOT_WORKSPACE_ROOT = self.workspace_nested
-      self.assertEqual(self.workspace_dir, workspace_lib.WorkspacePath())
-
-      # Outside the workspace, default dir.
-      constants.CHROOT_WORKSPACE_ROOT = self.tempdir
-      self.assertEqual(None, workspace_lib.WorkspacePath())
-
-      constants.CHROOT_WORKSPACE_ROOT = self.bogus_dir
-      self.assertEqual(None, workspace_lib.WorkspacePath())
-
-    finally:
-      # Restore our constant to it's real value.
-      constants.CHROOT_WORKSPACE_ROOT = orig_root
-
-  def testChrootPath(self):
-    # Check the default value.
-    self.assertEqual(os.path.join(self.workspace_dir, '.chroot'),
-                     workspace_lib.ChrootPath(self.workspace_dir))
-
-    # Set a new absolute value, check that we get it back.
-    workspace_lib.SetChrootDir(self.workspace_dir, self.bogus_dir)
-    self.assertEqual(self.bogus_dir,
-                     workspace_lib.ChrootPath(self.workspace_dir))
-
-    # Set a new relative path, check that it is properly appended to the
-    # workspace path.
-    workspace_lib.SetChrootDir(self.workspace_dir, 'some/path')
-    self.assertEqual(os.path.join(self.workspace_dir, 'some/path'),
-                     workspace_lib.ChrootPath(self.workspace_dir))
-
-  @mock.patch.object(osutils, 'IsInsideVm', return_value=True)
-  def testChrootPathUnderVm(self, _mock_inside_vm):
-    """Make sure that inside the VM, chroot dir is under /chroots/..."""
-    self.assertEqual(
-        os.path.join(workspace_lib.MAIN_CHROOT_DIR_IN_VM,
-                     os.path.basename(self.workspace_dir)),
-        workspace_lib.ChrootPath(self.workspace_dir)
-    )
-
-  def testReadWriteLocalConfig(self):
-    # Non-existent config should read as an empty dictionary.
-    config = workspace_lib._ReadLocalConfig(self.workspace_dir)
-    self.assertEqual({}, config)
-
-    # Write out an empty dict, and make sure we can read it back.
-    workspace_lib._WriteLocalConfig(self.workspace_dir, {})
-    config = workspace_lib._ReadLocalConfig(self.workspace_dir)
-    self.assertEqual({}, config)
-
-    # Write out a value, and verify we can read it.
-    workspace_lib._WriteLocalConfig(self.workspace_dir, {'version': 'foo'})
-    config = workspace_lib._ReadLocalConfig(self.workspace_dir)
-    self.assertEqual({'version': 'foo'}, config)
-
-    # Overwrite value, and verify we can read it.
-    workspace_lib._WriteLocalConfig(self.workspace_dir, {'version': 'bar'})
-    config = workspace_lib._ReadLocalConfig(self.workspace_dir)
-    self.assertEqual({'version': 'bar'}, config)
-
-  def testReadWriteActiveSdkVersion(self):
-    # If no version is set, value should be None.
-    version = workspace_lib.GetActiveSdkVersion(self.workspace_dir)
-    self.assertEqual(None, version)
-
-    # Set value, and make sure we can read it.
-    workspace_lib.SetActiveSdkVersion(self.workspace_dir, 'foo')
-    version = workspace_lib.GetActiveSdkVersion(self.workspace_dir)
-    self.assertEqual('foo', version)
-
-    # Set different value, and make sure we can read it.
-    workspace_lib.SetActiveSdkVersion(self.workspace_dir, 'bar')
-    version = workspace_lib.GetActiveSdkVersion(self.workspace_dir)
-    self.assertEqual('bar', version)
-
-    # Create config with unrelated values, should be same as no config.
-    workspace_lib._WriteLocalConfig(self.workspace_dir, {'foo': 'bar'})
-    version = workspace_lib.GetActiveSdkVersion(self.workspace_dir)
-    self.assertEqual(None, version)
-
-    # Set version, and make sure it works.
-    workspace_lib.SetActiveSdkVersion(self.workspace_dir, '1.2.3')
-    version = workspace_lib.GetActiveSdkVersion(self.workspace_dir)
-    self.assertEqual('1.2.3', version)
-
-    # Ensure all of config is there afterwords.
-    config = workspace_lib._ReadLocalConfig(self.workspace_dir)
-    self.assertEqual({'version': '1.2.3', 'foo': 'bar'}, config)
-
-  @mock.patch('os.getcwd')
-  @mock.patch.object(cros_build_lib, 'IsInsideChroot', return_value=False)
-  def testPathToLocator(self, _mock_inside, mock_cwd):
-    """Tests the path to locator conversion."""
-    ws = self.workspace_dir
-    mock_cwd.return_value = ws
-
-    foo_path = workspace_lib.PathToLocator(os.path.join(ws, 'foo'))
-    baz_path = workspace_lib.PathToLocator(os.path.join(ws, 'bar', 'foo',
-                                                        'baz'))
-    daisy_path = workspace_lib.PathToLocator(os.path.join(constants.SOURCE_ROOT,
-                                                          'src', 'overlays',
-                                                          'overlay-daisy'))
-    some_path = workspace_lib.PathToLocator(os.path.join(constants.SOURCE_ROOT,
-                                                         'srcs', 'bar'))
-
-    self.assertEqual('//foo', foo_path)
-    self.assertEqual('//bar/foo/baz', baz_path)
-    self.assertEqual('board:daisy', daisy_path)
-    self.assertEqual(None, some_path)
-
-    def assertReversible(loc):
-      path = workspace_lib.LocatorToPath(loc)
-      self.assertEqual(loc, workspace_lib.PathToLocator(path))
-
-    assertReversible('//foo')
-    assertReversible('//foo/bar/baz')
-    assertReversible('board:gizmo')
-
-
-class ConfigurationTest(cros_test_lib.TempDirTestCase):
-  """Test WriteConfigFile() and ReadConfigFile()."""
-
-  def testWriteReadConfigFile(self):
-    """Tests WriteConfigFile() then ReadConfigFile()."""
-    path = os.path.join(self.tempdir, 'foo.json')
-    config = {'foo': 1, 'bar': 2}
-
-    workspace_lib.WriteConfigFile(path, config)
-    self.assertDictEqual(config, workspace_lib.ReadConfigFile(path))
-
-  def testWriteConfigFileInvalid(self):
-    """Tests writing an invalid configuration file."""
-    path = os.path.join(self.tempdir, 'foo.json')
-    config = Exception()
-
-    with self.assertRaises(workspace_lib.ConfigFileError):
-      workspace_lib.WriteConfigFile(path, config)
-
-  def testReadConfigFileInvalid(self):
-    """Tests reading an invalid configuration file."""
-    path = os.path.join(self.tempdir, 'foo.json')
-    osutils.WriteFile(path, 'invalid contents')
-
-    with self.assertRaises(workspace_lib.ConfigFileError):
-      workspace_lib.ReadConfigFile(path)
diff --git a/mobmonitor/scripts/mobmonitor.py b/mobmonitor/scripts/mobmonitor.py
index 2bc70b8..e3fff64 100755
--- a/mobmonitor/scripts/mobmonitor.py
+++ b/mobmonitor/scripts/mobmonitor.py
@@ -12,12 +12,14 @@
 import os
 import sys
 
+from cherrypy.lib.static import serve_file
 from logging import handlers as logging_handlers
 
 from chromite.lib import remote_access
 from chromite.lib import commandline
 from chromite.lib import cros_logging as logging
 from chromite.mobmonitor.checkfile import manager
+from chromite.mobmonitor.util import collect_logs
 
 
 STATICDIR = '/etc/mobmonitor/static'
@@ -111,6 +113,12 @@
                                                   args, kwargs)
     return json.dumps(manager.MapServiceStatusToDict(status))
 
+  @cherrypy.expose
+  def CollectLogs(self):
+    tarfile = collect_logs.collect_logs()
+    return serve_file(tarfile, 'application/x-download',
+                      'attachment', os.path.basename(tarfile))
+
 
 def SetupLogging(logdir):
   logging.basicConfig(
diff --git a/mobmonitor/static/js/main.js b/mobmonitor/static/js/main.js
index ec1fad6..f09d36c 100644
--- a/mobmonitor/static/js/main.js
+++ b/mobmonitor/static/js/main.js
@@ -19,7 +19,7 @@
 
   // Setup the log collection button.
   $(document).on('click', '.collect-logs', function() {
-    console.log('Collecting logs!');
+    window.open('/CollectLogs', '_blank');
   });
 
   // Setup the repair action buttons
diff --git a/mobmonitor/util/__init__.py b/mobmonitor/util/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/mobmonitor/util/__init__.py
diff --git a/mobmonitor/util/collect_logs.py b/mobmonitor/util/collect_logs.py
new file mode 100644
index 0000000..503fbdf
--- /dev/null
+++ b/mobmonitor/util/collect_logs.py
@@ -0,0 +1,40 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Simple log collection script for Mob* Monitor"""
+
+from __future__ import print_function
+
+import os
+import tempfile
+import shutil
+
+from chromite.lib import cros_build_lib
+
+
+TMPDIR_PREFIX = 'moblab_logs_'
+LOG_DIRS = {
+    'system_logs': '/var/log',
+    'autotest_logs': '/usr/local/autotest/logs'
+}
+
+
+def collect_logs():
+  tempdir = tempfile.mkdtemp(prefix=TMPDIR_PREFIX)
+  os.chmod(tempdir, 0o777)
+
+  for name, path in LOG_DIRS.iteritems():
+    if not os.path.exists(path):
+      continue
+    shutil.copytree(path, os.path.join(tempdir, name))
+
+  cmd = ['mobmoncli', 'GetStatus']
+  cros_build_lib.RunCommand(
+      cmd,
+      log_stdout_to_file=os.path.join(tempdir, 'mobmonitor_getstatus')
+  )
+
+  tarball = '%s.tgz' % tempdir
+  cros_build_lib.CreateTarball(tarball, tempdir)
+  return tarball
diff --git a/scripts/cidb_admin.py b/scripts/cidb_admin.py
index 7bf6e57..d4f592a 100644
--- a/scripts/cidb_admin.py
+++ b/scripts/cidb_admin.py
@@ -79,5 +79,3 @@
     db = cidb.CIDBConnection(options.cred_dir)
     db.DropDatabase()
     print('Done.')
-
-
diff --git a/scripts/cros_best_revision.py b/scripts/cros_best_revision.py
index 8416721..ca424ad 100644
--- a/scripts/cros_best_revision.py
+++ b/scripts/cros_best_revision.py
@@ -49,7 +49,7 @@
     self._dryrun = dryrun
     self._lkgm = None
     self._old_lkgm = None
-    self.site_config = config_lib.LoadConfigFromFile()
+    self.site_config = config_lib.GetConfig()
 
 
   def CheckoutChromeLKGM(self):
diff --git a/scripts/cros_brick_utils.py b/scripts/cros_brick_utils.py
deleted file mode 100644
index 102d7a7..0000000
--- a/scripts/cros_brick_utils.py
+++ /dev/null
@@ -1,40 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Collection of tools used in scripts while we migrate to bricks."""
-
-from __future__ import print_function
-
-from chromite.lib import brick_lib
-from chromite.lib import commandline
-from chromite.lib import cros_build_lib
-
-
-def ParseArgs(argv):
-  """Parse arguments.
-
-  Args:
-    argv: array of arguments passed to the script.
-  """
-  parser = commandline.ArgumentParser(description=__doc__)
-  parser.add_argument('brick')
-  parser.add_argument(
-      '--friendly-name', action='store_true', dest='friendlyname',
-      help='Returns the friendly name for a given brick. This name is used in '
-      'the sysroot path and as "board name" in our legacy tools.')
-  options = parser.parse_args(argv)
-  options.Freeze()
-  return options
-
-
-def main(argv):
-  opts = ParseArgs(argv)
-
-  try:
-    brick = brick_lib.Brick(opts.brick, allow_legacy=False)
-  except brick_lib.BrickNotFound:
-    cros_build_lib.Die('Brick %s not found.' % opts.brick)
-
-  if opts.friendlyname:
-    print(brick.FriendlyName())
diff --git a/scripts/cros_generate_breakpad_symbols.py b/scripts/cros_generate_breakpad_symbols.py
index 2c7606b..92a242e 100644
--- a/scripts/cros_generate_breakpad_symbols.py
+++ b/scripts/cros_generate_breakpad_symbols.py
@@ -63,26 +63,26 @@
 
 
 def GenerateBreakpadSymbol(elf_file, debug_file=None, breakpad_dir=None,
-                           board=None, strip_cfi=False, num_errors=None):
+                           strip_cfi=False, num_errors=None,
+                           dump_syms_cmd='dump_syms'):
   """Generate the symbols for |elf_file| using |debug_file|
 
   Args:
     elf_file: The file to dump symbols for
     debug_file: Split debug file to use for symbol information
     breakpad_dir: The dir to store the output symbol file in
-    board: If |breakpad_dir| is not specified, use |board| to find it
     strip_cfi: Do not generate CFI data
     num_errors: An object to update with the error count (needs a .value member)
+    dump_syms_cmd: Command to use for dumping symbols.
 
   Returns:
-    The number of errors that were encountered.
+    The name of symbol file written out.
   """
-  if breakpad_dir is None:
-    breakpad_dir = FindBreakpadDir(board)
+  assert breakpad_dir
   if num_errors is None:
     num_errors = ctypes.c_int()
 
-  cmd_base = ['dump_syms']
+  cmd_base = [dump_syms_cmd]
   if strip_cfi:
     cmd_base += ['-c']
   # Some files will not be readable by non-root (e.g. set*id /bin/su).
@@ -151,7 +151,7 @@
     os.chmod(sym_file, 0o644)
     temp.delete = False
 
-  return num_errors.value
+  return sym_file
 
 
 def GenerateBreakpadSymbols(board, breakpad_dir=None, strip_cfi=False,
@@ -264,7 +264,7 @@
 
   # Now start generating symbols for the discovered elfs.
   with parallel.BackgroundTaskRunner(GenerateBreakpadSymbol,
-                                     breakpad_dir=breakpad_dir, board=board,
+                                     breakpad_dir=breakpad_dir,
                                      strip_cfi=strip_cfi,
                                      num_errors=bg_errors,
                                      processes=num_processes) as queue:
diff --git a/scripts/cros_generate_breakpad_symbols_unittest.py b/scripts/cros_generate_breakpad_symbols_unittest.py
index 9c27c50..9290feb 100644
--- a/scripts/cros_generate_breakpad_symbols_unittest.py
+++ b/scripts/cros_generate_breakpad_symbols_unittest.py
@@ -243,28 +243,20 @@
   def testNormal(self):
     """Normal run -- given an ELF and a debug file"""
     ret = cros_generate_breakpad_symbols.GenerateBreakpadSymbol(
-        self.elf_file, self.debug_file, breakpad_dir=self.breakpad_dir)
-    self.assertEqual(ret, 0)
+        self.elf_file, self.debug_file, self.breakpad_dir)
+    self.assertEqual(ret, self.sym_file)
     self.assertEqual(self.rc_mock.call_count, 1)
     self.assertCommandArgs(0, ['dump_syms', self.elf_file, self.debug_dir])
     self.assertExists(self.sym_file)
 
-  def testNormalBoard(self):
-    """Normal run w/board info but not breakpad dir"""
-    ret = cros_generate_breakpad_symbols.GenerateBreakpadSymbol(
-        self.elf_file, board='foo')
-    self.assertEqual(ret, 0)
-    self.assertCommandArgs(0, ['dump_syms', self.elf_file])
-    self.assertEqual(self.rc_mock.call_count, 1)
-    self.assertExists(self.sym_file)
-
   def testNormalNoCfi(self):
     """Normal run w/out CFI"""
     # Make sure the num_errors flag works too.
     num_errors = ctypes.c_int()
     ret = cros_generate_breakpad_symbols.GenerateBreakpadSymbol(
-        self.elf_file, strip_cfi=True, num_errors=num_errors)
-    self.assertEqual(ret, 0)
+        self.elf_file, breakpad_dir=self.breakpad_dir,
+        strip_cfi=True, num_errors=num_errors)
+    self.assertEqual(ret, self.sym_file)
     self.assertEqual(num_errors.value, 0)
     self.assertCommandArgs(0, ['dump_syms', '-c', self.elf_file])
     self.assertEqual(self.rc_mock.call_count, 1)
@@ -272,8 +264,9 @@
 
   def testNormalElfOnly(self):
     """Normal run -- given just an ELF"""
-    ret = cros_generate_breakpad_symbols.GenerateBreakpadSymbol(self.elf_file)
-    self.assertEqual(ret, 0)
+    ret = cros_generate_breakpad_symbols.GenerateBreakpadSymbol(
+        self.elf_file, breakpad_dir=self.breakpad_dir)
+    self.assertEqual(ret, self.sym_file)
     self.assertCommandArgs(0, ['dump_syms', self.elf_file])
     self.assertEqual(self.rc_mock.call_count, 1)
     self.assertExists(self.sym_file)
@@ -282,17 +275,18 @@
     """Normal run where ELF is readable only by root"""
     with mock.patch.object(os, 'access') as mock_access:
       mock_access.return_value = False
-      ret = cros_generate_breakpad_symbols.GenerateBreakpadSymbol(self.elf_file)
-      self.assertEqual(ret, 0)
-      self.assertCommandArgs(0, ['sudo', '--', 'dump_syms', self.elf_file])
+      ret = cros_generate_breakpad_symbols.GenerateBreakpadSymbol(
+          self.elf_file, breakpad_dir=self.breakpad_dir)
+    self.assertEqual(ret, self.sym_file)
+    self.assertCommandArgs(0, ['sudo', '--', 'dump_syms', self.elf_file])
 
   def testLargeDebugFail(self):
     """Running w/large .debug failed, but retry worked"""
     self.rc_mock.AddCmdResult(['dump_syms', self.elf_file, self.debug_dir],
                               returncode=1)
     ret = cros_generate_breakpad_symbols.GenerateBreakpadSymbol(
-        self.elf_file, self.debug_file)
-    self.assertEqual(ret, 0)
+        self.elf_file, self.debug_file, self.breakpad_dir)
+    self.assertEqual(ret, self.sym_file)
     self.assertEqual(self.rc_mock.call_count, 2)
     self.assertCommandArgs(0, ['dump_syms', self.elf_file, self.debug_dir])
     self.assertCommandArgs(
@@ -307,8 +301,8 @@
                                self.debug_dir],
                               returncode=1)
     ret = cros_generate_breakpad_symbols.GenerateBreakpadSymbol(
-        self.elf_file, self.debug_file)
-    self.assertEqual(ret, 0)
+        self.elf_file, self.debug_file, self.breakpad_dir)
+    self.assertEqual(ret, self.sym_file)
     self.assertEqual(self.rc_mock.call_count, 3)
     self.assertCommandArgs(0, ['dump_syms', self.elf_file, self.debug_dir])
     self.assertCommandArgs(
@@ -319,12 +313,13 @@
   def testCompleteFail(self):
     """Running dump_syms always fails"""
     self.rc_mock.SetDefaultCmdResult(returncode=1)
-    ret = cros_generate_breakpad_symbols.GenerateBreakpadSymbol(self.elf_file)
+    ret = cros_generate_breakpad_symbols.GenerateBreakpadSymbol(
+        self.elf_file, breakpad_dir=self.breakpad_dir)
     self.assertEqual(ret, 1)
     # Make sure the num_errors flag works too.
     num_errors = ctypes.c_int()
     ret = cros_generate_breakpad_symbols.GenerateBreakpadSymbol(
-        self.elf_file, num_errors=num_errors)
+        self.elf_file, breakpad_dir=self.breakpad_dir, num_errors=num_errors)
     self.assertEqual(ret, 1)
     self.assertEqual(num_errors.value, 1)
 
diff --git a/scripts/cros_list_modified_packages.py b/scripts/cros_list_modified_packages.py
index b5327e9..7b9dfd1 100644
--- a/scripts/cros_list_modified_packages.py
+++ b/scripts/cros_list_modified_packages.py
@@ -34,7 +34,6 @@
   import queue as Queue
 
 from chromite.cbuildbot import constants
-from chromite.lib import brick_lib
 from chromite.lib import commandline
 from chromite.lib import cros_build_lib
 from chromite.lib import cros_logging as logging
@@ -142,12 +141,6 @@
   vdb_path = os.path.join(sysroot.path, portage.const.VDB_PATH)
 
   for overlay in overlays:
-    # Is this a brick overlay? Get its source base directory.
-    brick_srcbase = ''
-    brick = brick_lib.FindBrickInPath(overlay)
-    if brick and brick.OverlayDir() == overlay.rstrip(os.path.sep):
-      brick_srcbase = brick.SourceDir()
-
     for filename, projects, srcpaths in portage_util.GetWorkonProjectMap(
         overlay, packages):
       # chromeos-base/power_manager/power_manager-9999
@@ -170,12 +163,9 @@
       # Get the modificaton time of the ebuild in the overlay.
       src_ebuild_mtime = os.lstat(os.path.join(overlay, filename)).st_mtime
 
-      # Translate relative srcpath values into their absolute counterparts.
-      full_srcpaths = [os.path.join(brick_srcbase, s) for s in srcpaths]
-
       # Write info into the results dictionary, overwriting any previous
       # values. This ensures that overlays override appropriately.
-      results[cp] = WorkonPackageInfo(cp, pkg_mtime, projects, full_srcpaths,
+      results[cp] = WorkonPackageInfo(cp, pkg_mtime, projects, srcpaths,
                                       src_ebuild_mtime)
 
   return results.values()
@@ -231,7 +221,6 @@
 
   target = parser.add_mutually_exclusive_group(required=True)
   target.add_argument('--board', help='Board name')
-  target.add_argument('--brick', help='Brick locator')
   target.add_argument('--host', default=False, action='store_true',
                       help='Look at host packages instead of board packages')
   target.add_argument('--sysroot', help='Sysroot path.')
@@ -245,12 +234,7 @@
   logging.getLogger().setLevel(logging.INFO)
   flags = _ParseArguments(argv)
   sysroot = None
-  if flags.brick:
-    try:
-      sysroot = cros_build_lib.GetSysroot(brick_lib.Brick(flags.brick))
-    except brick_lib.BrickNotFound:
-      cros_build_lib.Die('Could not load brick %s.' % flags.brick)
-  elif flags.board:
+  if flags.board:
     sysroot = cros_build_lib.GetSysroot(flags.board)
   elif flags.host:
     sysroot = '/'
diff --git a/scripts/cros_list_overlays.py b/scripts/cros_list_overlays.py
index 2cead1b..754a041 100644
--- a/scripts/cros_list_overlays.py
+++ b/scripts/cros_list_overlays.py
@@ -9,7 +9,6 @@
 import os
 
 from chromite.cbuildbot import constants
-from chromite.lib import brick_lib
 from chromite.lib import commandline
 from chromite.lib import cros_build_lib
 from chromite.lib import portage_util
@@ -28,7 +27,6 @@
                            'only makes sense when --board is specified.')
   parser.add_argument('-a', '--all', default=False, action='store_true',
                       help='Show all overlays (even common ones).')
-  parser.add_argument('--brick', help='Main brick to use')
 
   opts = parser.parse_args(argv)
   opts.Freeze()
@@ -36,13 +34,6 @@
   if opts.primary_only and opts.board is None:
     parser.error('--board is required when --primary_only is supplied.')
 
-  if opts.brick:
-    if opts.board:
-      parser.error('--board and --brick are incompatible.')
-
-    if opts.all:
-      parser.error('Cannot list all overlays with --brick')
-
   return opts
 
 
@@ -50,21 +41,17 @@
   opts = _ParseArguments(argv)
   args = (constants.BOTH_OVERLAYS, opts.board)
 
-  if opts.brick:
-    main_brick = brick_lib.Brick(opts.brick)
-    overlays = [b.OverlayDir() for b in main_brick.BrickStack()]
-  else:
-    # Verify that a primary overlay exists.
-    try:
-      primary_overlay = portage_util.FindPrimaryOverlay(*args)
-    except portage_util.MissingOverlayException as ex:
-      cros_build_lib.Die(str(ex))
+  # Verify that a primary overlay exists.
+  try:
+    primary_overlay = portage_util.FindPrimaryOverlay(*args)
+  except portage_util.MissingOverlayException as ex:
+    cros_build_lib.Die(str(ex))
 
-    # Get the overlays to print.
-    if opts.primary_only:
-      overlays = [primary_overlay]
-    else:
-      overlays = portage_util.FindOverlays(*args)
+  # Get the overlays to print.
+  if opts.primary_only:
+    overlays = [primary_overlay]
+  else:
+    overlays = portage_util.FindOverlays(*args)
 
   # Exclude any overlays in src/third_party, for backwards compatibility with
   # scripts that expected these to not be listed.
diff --git a/scripts/cros_mark_as_stable.py b/scripts/cros_mark_as_stable.py
index 531f652..4b24953 100644
--- a/scripts/cros_mark_as_stable.py
+++ b/scripts/cros_mark_as_stable.py
@@ -79,12 +79,8 @@
 # TODO(build): This code needs to be gutted and rebased to cros_build_lib.
 def _DoWeHaveLocalCommits(stable_branch, tracking_branch, cwd):
   """Returns true if there are local commits."""
-  current_branch = git.GetCurrentBranch(cwd)
-
-  if current_branch != stable_branch:
-    return False
   output = git.RunGit(
-      cwd, ['rev-parse', 'HEAD', tracking_branch]).output.split()
+      cwd, ['rev-parse', stable_branch, tracking_branch]).output.split()
   return output[0] != output[1]
 
 
@@ -108,14 +104,23 @@
   Raises:
     OSError: Error occurred while pushing.
   """
+  if not git.DoesCommitExistInRepo(cwd, stable_branch):
+    logging.debug('No branch created for %s.  Exiting', cwd)
+    return
+
   if not _DoWeHaveLocalCommits(stable_branch, tracking_branch, cwd):
-    logging.info('No work found to push in %s.  Exiting', cwd)
+    logging.debug('No work found to push in %s.  Exiting', cwd)
     return
 
   # For the commit queue, our local branch may contain commits that were
   # just tested and pushed during the CommitQueueCompletion stage. Sync
   # and rebase our local branch on top of the remote commits.
-  remote_ref = git.GetTrackingBranch(cwd, for_push=True)
+  remote_ref = git.GetTrackingBranch(cwd,
+                                     branch=stable_branch,
+                                     for_push=True)
+  # SyncPushBranch rebases HEAD onto the updated remote. We need to checkout
+  # stable_branch here in order to update it.
+  git.RunGit(cwd, ['checkout', stable_branch])
   git.SyncPushBranch(cwd, remote_ref.remote, remote_ref.ref)
 
   # Check whether any local changes remain after the sync.
@@ -141,7 +146,8 @@
        '%s..%s' % (remote_ref.ref, stable_branch)]).output
   description = '%s\n\n%s' % (GIT_COMMIT_SUBJECT, description)
   logging.info('For %s, using description %s', cwd, description)
-  git.CreatePushBranch(constants.MERGE_BRANCH, cwd)
+  git.CreatePushBranch(constants.MERGE_BRANCH, cwd,
+                       remote_push_branch=remote_ref)
   git.RunGit(cwd, ['merge', '--squash', stable_branch])
   git.RunGit(cwd, ['commit', '-m', description])
   git.RunGit(cwd, ['config', 'push.default', 'tracking'])
diff --git a/scripts/cros_mark_as_stable_unittest.py b/scripts/cros_mark_as_stable_unittest.py
index 9d87e1b..bafb160 100644
--- a/scripts/cros_mark_as_stable_unittest.py
+++ b/scripts/cros_mark_as_stable_unittest.py
@@ -41,6 +41,7 @@
 
     git_log = 'Marking test_one as stable\nMarking test_two as stable\n'
     fake_description = 'Marking set of ebuilds as stable\n\n%s' % git_log
+    self.PatchObject(git, 'DoesCommitExistInRepo', return_value=True)
     self.PatchObject(cros_mark_as_stable, '_DoWeHaveLocalCommits',
                      return_value=True)
     self.PatchObject(cros_mark_as_stable.GitBranch, 'CreateBranch',
@@ -56,6 +57,8 @@
     create_mock = self.PatchObject(git, 'CreatePushBranch')
     git_mock = self.StartPatcher(RunGitMock())
 
+    git_mock.AddCmdResult(['checkout', self._branch])
+
     cmd = ['log', '--format=short', '--perl-regexp', '--author',
            '^(?!chrome-bot)', 'refs/remotes/gerrit/master..%s' % self._branch]
 
@@ -72,14 +75,13 @@
       git_mock.AddCmdResult(['commit', '-m', fake_description])
       git_mock.AddCmdResult(['config', 'push.default', 'tracking'])
 
-    try:
-      cros_mark_as_stable.PushChange(self._branch, self._target_manifest_branch,
-                                     False, '.')
-    finally:
-      sync_mock.assert_called_with('.', 'gerrit', 'refs/remotes/gerrit/master')
-      if not bad_cls:
-        push_mock.assert_called_with('merge_branch', '.', dryrun=False)
-        create_mock.assert_called_with('merge_branch', '.')
+    cros_mark_as_stable.PushChange(self._branch, self._target_manifest_branch,
+                                   False, '.')
+    sync_mock.assert_called_with('.', 'gerrit', 'refs/remotes/gerrit/master')
+    if not bad_cls:
+      push_mock.assert_called_with('merge_branch', '.', dryrun=False)
+      create_mock.assert_called_with('merge_branch', '.',
+                                     remote_push_branch=mock.ANY)
 
   def testPushChange(self):
     """Verify pushing changes works."""
diff --git a/scripts/cros_setup_toolchains.py b/scripts/cros_setup_toolchains.py
index 6cda170..ca6d311 100644
--- a/scripts/cros_setup_toolchains.py
+++ b/scripts/cros_setup_toolchains.py
@@ -18,7 +18,6 @@
 from chromite.lib import osutils
 from chromite.lib import parallel
 from chromite.lib import toolchain
-from chromite.lib import workspace_lib
 
 # Needs to be after chromite imports.
 import lddtree
@@ -569,8 +568,8 @@
     Dictionary of concrete targets and their toolchain tuples.
   """
   targets_wanted = set(targets_wanted)
-  if targets_wanted in (set(['boards']), set(['bricks'])):
-    # Only pull targets from the included boards/bricks.
+  if targets_wanted == set(['boards']):
+    # Only pull targets from the included boards.
     return {}
 
   all_targets = toolchain.GetAllTargets()
@@ -589,7 +588,7 @@
 
 
 def UpdateToolchains(usepkg, deleteold, hostonly, reconfig,
-                     targets_wanted, boards_wanted, bricks_wanted, root='/'):
+                     targets_wanted, boards_wanted, root='/'):
   """Performs all steps to create a synchronized toolchain enviroment.
 
   Args:
@@ -599,7 +598,6 @@
     reconfig: Reload crossdev config and reselect toolchains
     targets_wanted: All the targets to update
     boards_wanted: Load targets from these boards
-    bricks_wanted: Load targets from these bricks
     root: The root in which to install the toolchains.
   """
   targets, crossdev_targets, reconfig_targets = {}, {}, {}
@@ -608,12 +606,10 @@
     # work on bare systems where this is useful.
     targets = ExpandTargets(targets_wanted)
 
-    # Now re-add any targets that might be from this board/brick. This is to
+    # Now re-add any targets that might be from this board. This is to
     # allow unofficial boards to declare their own toolchains.
     for board in boards_wanted:
       targets.update(toolchain.GetToolchainsForBoard(board))
-    for brick in bricks_wanted:
-      targets.update(toolchain.GetToolchainsForBrick(brick))
 
     # First check and initialize all cross targets that need to be.
     for target in targets:
@@ -647,12 +643,9 @@
   """Show the toolchain tuples used by |name|
 
   Args:
-    name: The board name or brick locator to query.
+    name: The board name to query.
   """
-  if workspace_lib.IsLocator(name):
-    toolchains = toolchain.GetToolchainsForBrick(name)
-  else:
-    toolchains = toolchain.GetToolchainsForBoard(name)
+  toolchains = toolchain.GetToolchainsForBoard(name)
   # Make sure we display the default toolchain first.
   print(','.join(
       toolchain.FilterToolchains(toolchains, 'default', True).keys() +
@@ -1075,21 +1068,18 @@
   parser.add_argument('-t', '--targets',
                       dest='targets', default='sdk',
                       help="Comma separated list of tuples. Special keywords "
-                           "'host', 'sdk', 'boards', 'bricks' and 'all' are "
+                           "'host', 'sdk', 'boards', and 'all' are "
                            "allowed. Defaults to 'sdk'.")
   parser.add_argument('--include-boards', default='', metavar='BOARDS',
                       help='Comma separated list of boards whose toolchains we '
                            'will always include. Default: none')
-  parser.add_argument('--include-bricks', default='', metavar='BRICKS',
-                      help='Comma separated list of bricks whose toolchains we '
-                           'will always include. Default: none')
   parser.add_argument('--hostonly',
                       dest='hostonly', default=False, action='store_true',
                       help='Only setup the host toolchain. '
                            'Useful for bootstrapping chroot')
   parser.add_argument('--show-board-cfg', '--show-cfg',
                       dest='cfg_name', default=None,
-                      help='Board or brick to list toolchains tuples for')
-                      help='Board to list toolchains tuples for')
   parser.add_argument('--create-packages',
                       action='store_true', default=False,
                       help='Build redistributable packages')
@@ -1110,8 +1100,6 @@
   targets_wanted = set(options.targets.split(','))
   boards_wanted = (set(options.include_boards.split(','))
                    if options.include_boards else set())
-  bricks_wanted = (set(options.include_bricks.split(','))
-                   if options.include_bricks else set())
 
   if options.cfg_name:
     ShowConfig(options.cfg_name)
@@ -1129,7 +1117,7 @@
     root = options.sysroot or '/'
     UpdateToolchains(options.usepkg, options.deleteold, options.hostonly,
                      options.reconfig, targets_wanted, boards_wanted,
-                     bricks_wanted, root=root)
+                     root=root)
     Crossdev.Save()
 
   return 0
diff --git a/scripts/cros_show_waterfall_layout.py b/scripts/cros_show_waterfall_layout.py
index 0b064a6..9bad367 100644
--- a/scripts/cros_show_waterfall_layout.py
+++ b/scripts/cros_show_waterfall_layout.py
@@ -61,7 +61,7 @@
 def main(argv):
   opts = _ParseArguments(argv)
 
-  site_config = config_lib.LoadConfigFromFile()
+  site_config = config_lib.GetConfig()
 
   layout = {}
   for config_name, config in site_config.iteritems():
diff --git a/scripts/cros_sysroot_utils.py b/scripts/cros_sysroot_utils.py
index a4d3fbf..614153e 100644
--- a/scripts/cros_sysroot_utils.py
+++ b/scripts/cros_sysroot_utils.py
@@ -10,7 +10,6 @@
 import os
 import sys
 
-from chromite.lib import brick_lib
 from chromite.lib import commandline
 from chromite.lib import cros_build_lib
 from chromite.lib import sysroot_lib
@@ -33,7 +32,6 @@
   config = subparser.add_parser('generate-config')
   target = config.add_mutually_exclusive_group(required=True)
   target.add_argument('--board', help='Board to generate the config for.')
-  target.add_argument('--brick', help='Brick to generate the config for.')
   config.add_argument('--out-file', dest='out_file',
                       help='File to write into. If not specified, the '
                       'configuration will be printed to stdout.')
@@ -84,11 +82,7 @@
   if opts.command == 'create-wrappers':
     sysroot.CreateAllWrappers(opts.friendlyname)
   elif opts.command == 'generate-config':
-    if opts.brick:
-      config = sysroot.GenerateBrickConfig(
-          brick_lib.Brick(opts.brick).BrickStack())
-    else:
-      config = sysroot.GenerateBoardConfig(opts.board)
+    config = sysroot.GenerateBoardConfig(opts.board)
 
     output.write('\n' + config)
   elif opts.command == 'generate-make-conf':
diff --git a/scripts/cros_unittest.py b/scripts/cros_unittest.py
index 7510fca..e05ef90 100644
--- a/scripts/cros_unittest.py
+++ b/scripts/cros_unittest.py
@@ -13,7 +13,7 @@
 from chromite.scripts import cros
 
 
-class RunScriptTest(cros_test_lib.WorkspaceTestCase):
+class RunScriptTest(cros_test_lib.MockTempDirTestCase):
   """Test the main functionality."""
 
   def setUp(self):
diff --git a/scripts/cros_workon.py b/scripts/cros_workon.py
index 087c0a8..252ebc3 100644
--- a/scripts/cros_workon.py
+++ b/scripts/cros_workon.py
@@ -12,7 +12,6 @@
 
 from __future__ import print_function
 
-from chromite.lib import brick_lib
 from chromite.lib import commandline
 from chromite.lib import cros_build_lib
 from chromite.lib import terminal
@@ -23,7 +22,6 @@
   shared = commandline.SharedParser()
   shared.add_argument('--board', default=cros_build_lib.GetDefaultBoard(),
                       help='The board to set package keywords for.')
-  shared.add_argument('--brick', help='The brick to set package keywords for.')
   shared.add_argument('--host', default=False, action='store_true',
                       help='Uses the host instead of board')
   shared.add_argument('--remote', default='',
@@ -81,15 +79,8 @@
   elif options.board:
     friendly_name = options.board
     sysroot = cros_build_lib.GetSysroot(board=options.board)
-  elif options.brick:
-    brick = brick_lib.Brick(options.brick)
-    friendly_name = brick.FriendlyName()
-    # TODO(wiley) This is a hack.  It doesn't really make sense to calculate
-    #             the sysroot from a brick alone, since bricks are installed
-    #             into sysroots.  Revisit this when blueprints are working.
-    sysroot = cros_build_lib.GetSysroot(friendly_name)
   else:
-    cros_build_lib.Die('You must specify either --host, --board or --brick')
+    cros_build_lib.Die('You must specify either --host or --board')
 
   helper = workon_helper.WorkonHelper(sysroot, friendly_name)
   try:
diff --git a/scripts/gerrit.py b/scripts/gerrit.py
index deefe4d..0f66bea 100644
--- a/scripts/gerrit.py
+++ b/scripts/gerrit.py
@@ -417,7 +417,7 @@
 
 
 def UserActDeletedraft(opts, *args):
-  """Delete draft patch set <n> [n ...]"""
+  """Delete draft CL <n> [n ...]"""
   for arg in args:
     helper, cl = GetGerrit(opts, arg)
     helper.DeleteDraft(cl, dryrun=opts.dryrun)
diff --git a/scripts/parallel_emerge.py b/scripts/parallel_emerge.py
index 176ef34..c219965 100644
--- a/scripts/parallel_emerge.py
+++ b/scripts/parallel_emerge.py
@@ -1517,6 +1517,7 @@
   def _Status(self):
     """Print status."""
     current_time = time.time()
+    current_time_struct = time.localtime(current_time)
     no_output = True
 
     # Print interim output every minute if --show-output is used. Otherwise,
@@ -1563,7 +1564,9 @@
         if retries:
           line += "Retrying %s, " % (retries,)
       load = " ".join(str(x) for x in os.getloadavg())
-      line += ("[Time %dm%.1fs Load %s]" % (seconds / 60, seconds % 60, load))
+      line += ("[Time %s | Elapsed %dm%.1fs | Load %s]" % (
+          time.strftime('%H:%M:%S', current_time_struct), seconds / 60,
+          seconds % 60, load))
       self._Print(line)
 
   def _Finish(self, target):
diff --git a/scripts/pushimage.py b/scripts/pushimage.py
index 58c0ca2..342295f 100644
--- a/scripts/pushimage.py
+++ b/scripts/pushimage.py
@@ -49,6 +49,8 @@
     constants.IMAGE_TYPE_FACTORY,
     constants.IMAGE_TYPE_FIRMWARE,
     constants.IMAGE_TYPE_NV_LP0_FIRMWARE,
+    constants.IMAGE_TYPE_ACCESSORY_USBPD,
+    constants.IMAGE_TYPE_ACCESSORY_RWSIG,
     constants.IMAGE_TYPE_BASE,
 )
 
@@ -60,6 +62,10 @@
 class MissingBoardInstructions(Exception):
   """Raised when a board lacks any signer instructions."""
 
+  def __init__(self, board, image_type, input_insns):
+    Exception.__init__(self, 'Board %s lacks insns for %s image: %s not found' %
+                       (board, image_type, input_insns))
+
 
 class InputInsns(object):
   """Object to hold settings for a signable board.
@@ -68,18 +74,38 @@
   reads) is not exactly the same as the instruction file pushimage reads.
   """
 
-  def __init__(self, board):
+  def __init__(self, board, image_type=None):
+    """Initialization.
+
+    Args:
+      board: The board to look up details.
+      image_type: The type of image we will be signing (see --sign-types).
+    """
     self.board = board
 
     config = ConfigParser.ConfigParser()
     config.readfp(open(self.GetInsnFile('DEFAULT')))
+
     # What pushimage internally refers to as 'recovery', are the basic signing
     # instructions in practice, and other types are stacked on top.
+    if image_type is None:
+      image_type = constants.IMAGE_TYPE_RECOVERY
+    self.image_type = image_type
     input_insns = self.GetInsnFile(constants.IMAGE_TYPE_RECOVERY)
     if not os.path.exists(input_insns):
       # This board doesn't have any signing instructions.
-      raise MissingBoardInstructions(self.board)
+      raise MissingBoardInstructions(self.board, image_type, input_insns)
     config.readfp(open(input_insns))
+
+    if image_type is not None:
+      input_insns = self.GetInsnFile(image_type)
+      if not os.path.exists(input_insns):
+        # This type doesn't have any signing instructions.
+        raise MissingBoardInstructions(self.board, image_type, input_insns)
+
+      self.image_type = image_type
+      config.readfp(open(input_insns))
+
     self.cfg = config
 
   def GetInsnFile(self, image_type):
@@ -125,24 +151,82 @@
     """
     return self.SplitCfgField(self.cfg.get('insns', 'channel'))
 
-  def GetKeysets(self):
-    """Return the list of keysets to sign for this board."""
-    return self.SplitCfgField(self.cfg.get('insns', 'keyset'))
+  def GetKeysets(self, insns_merge=None):
+    """Return the list of keysets to sign for this board.
 
-  def OutputInsns(self, image_type, output_file, sect_insns, sect_general):
+    Args:
+      insns_merge: The additional section to look at over [insns].
+    """
+    # First load the default value from [insns.keyset] if available.
+    sections = ['insns']
+    # Then overlay the [insns.xxx.keyset] if requested.
+    if insns_merge is not None:
+      sections += [insns_merge]
+
+    keyset = ''
+    for section in sections:
+      try:
+        keyset = self.cfg.get(section, 'keyset')
+      except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
+        pass
+
+    # We do not perturb the order (e.g. using sorted() or making a set())
+    # because we want the behavior stable, and we want the input insns to
+    # explicitly control the order (since it has an impact on naming).
+    return self.SplitCfgField(keyset)
+
+  def GetAltInsnSets(self):
+    """Return the list of alternative insn sections."""
+    # We do not perturb the order (e.g. using sorted() or making a set())
+    # because we want the behavior stable, and we want the input insns to
+    # explicitly control the order (since it has an impact on naming).
+    ret = [x for x in self.cfg.sections() if x.startswith('insns.')]
+    return ret if ret else [None]
+
+  @staticmethod
+  def CopyConfigParser(config):
+    """Return a copy of a ConfigParser object.
+
+    The python guys broke the ability to use something like deepcopy:
+    https://bugs.python.org/issue16058
+    """
+    # Write the current config to a string io object.
+    data = cStringIO.StringIO()
+    config.write(data)
+    data.seek(0)
+
+    # Create a new ConfigParser from the serialized data.
+    ret = ConfigParser.ConfigParser()
+    ret.readfp(data)
+
+    return ret
+
+  def OutputInsns(self, output_file, sect_insns, sect_general,
+                  insns_merge=None):
     """Generate the output instruction file for sending to the signer.
 
+    The override order is (later has precedence):
+      [insns]
+      [insns_merge]  (should be named "insns.xxx")
+      sect_insns
+
     Note: The format of the instruction file pushimage outputs (and the signer
     reads) is not exactly the same as the instruction file pushimage reads.
 
     Args:
-      image_type: The type of image we will be signing (see --sign-types).
       output_file: The file to write the new instruction file to.
       sect_insns: Items to set/override in the [insns] section.
       sect_general: Items to set/override in the [general] section.
+      insns_merge: The alternative insns.xxx section to merge.
     """
-    config = ConfigParser.ConfigParser()
-    config.readfp(open(self.GetInsnFile(image_type)))
+    # Create a copy so we can clobber certain fields.
+    config = self.CopyConfigParser(self.cfg)
+    sect_insns = sect_insns.copy()
+
+    # Merge in the alternative insns section if need be.
+    if insns_merge is not None:
+      for k, v in config.items(insns_merge):
+        sect_insns.setdefault(k, v)
 
     # Clear channel entry in instructions file, ensuring we only get
     # one channel for the signer to look at.  Then provide all the
@@ -154,11 +238,15 @@
       for k, v in fields.iteritems():
         config.set(sect, k, v)
 
+    # Now prune the alternative sections.
+    for alt in self.GetAltInsnSets():
+      config.remove_section(alt)
+
     output = cStringIO.StringIO()
     config.write(output)
     data = output.getvalue()
     osutils.WriteFile(output_file, data)
-    logging.debug('generated insns file for %s:\n%s', image_type, data)
+    logging.debug('generated insns file for %s:\n%s', self.image_type, data)
 
 
 def MarkImageToBeSigned(ctx, tbs_base, insns_path, priority):
@@ -244,14 +332,13 @@
   try:
     input_insns = InputInsns(board)
   except MissingBoardInstructions as e:
-    logging.warning('board "%s" is missing base instruction file: %s', board, e)
+    logging.warning('Missing base instruction file: %s', e)
     logging.warning('not uploading anything for signing')
     return
   channels = input_insns.GetChannels()
 
-  # We want force_keysets as a set, and keysets as a list.
+  # We want force_keysets as a set.
   force_keysets = set(force_keysets)
-  keysets = list(force_keysets) if force_keysets else input_insns.GetKeysets()
 
   if mock:
     logging.info('Upload mode: mock; signers will not process anything')
@@ -279,7 +366,6 @@
   if dry_run:
     logging.info('DRY RUN MODE ACTIVE: NOTHING WILL BE UPLOADED')
   logging.info('Signing for channels: %s', ' '.join(channels))
-  logging.info('Signing for keysets : %s', ' '.join(keysets))
 
   instruction_urls = {}
 
@@ -303,6 +389,8 @@
     firmware_basename = _ImageNameBase(constants.IMAGE_TYPE_FIRMWARE)
     nv_lp0_firmware_basename = _ImageNameBase(
         constants.IMAGE_TYPE_NV_LP0_FIRMWARE)
+    acc_usbpd_basename = _ImageNameBase(constants.IMAGE_TYPE_ACCESSORY_USBPD)
+    acc_rwsig_basename = _ImageNameBase(constants.IMAGE_TYPE_ACCESSORY_RWSIG)
     test_basename = _ImageNameBase(constants.IMAGE_TYPE_TEST)
     base_basename = _ImageNameBase(constants.IMAGE_TYPE_BASE)
     hwqual_tarball = 'chromeos-hwqual-%s-%s.tar.bz2' % (board, versionrev)
@@ -335,6 +423,12 @@
 
         ('firmware_from_source.tar.bz2', nv_lp0_firmware_basename, 'tar.bz2',
          constants.IMAGE_TYPE_NV_LP0_FIRMWARE),
+
+        ('firmware_from_source.tar.bz2', acc_usbpd_basename, 'tar.bz2',
+         constants.IMAGE_TYPE_ACCESSORY_USBPD),
+
+        ('firmware_from_source.tar.bz2', acc_rwsig_basename, 'tar.bz2',
+         constants.IMAGE_TYPE_ACCESSORY_RWSIG),
     )
 
     # The following build artifacts are copied and marked for signing, if
@@ -387,66 +481,80 @@
 
     logging.debug('Files to sign: %s', files_to_sign)
     # Now go through the subset for signing.
-    for keyset in keysets:
-      logging.debug('\n\n#### KEYSET: %s ####\n', keyset)
-      sect_insns['keyset'] = keyset
-      for image_type, dst_name, suffix in files_to_sign:
-        dst_archive = '%s.%s' % (dst_name, suffix)
-        sect_general['archive'] = dst_archive
-        sect_general['type'] = image_type
+    for image_type, dst_name, suffix in files_to_sign:
+      try:
+        input_insns = InputInsns(board, image_type=image_type)
+      except MissingBoardInstructions as e:
+        logging.info('Nothing to sign: %s', e)
+        continue
 
-        # In the default/automatic mode, only flag files for signing if the
-        # archives were actually uploaded in a previous stage. This additional
-        # check can be removed in future once |sign_types| becomes a required
-        # argument.
-        # TODO: Make |sign_types| a required argument.
-        gs_artifact_path = os.path.join(dst_path, dst_archive)
-        exists = False
-        try:
-          exists = ctx.Exists(gs_artifact_path)
-        except gs.GSContextException:
-          unknown_error[0] = True
-          logging.error('Unknown error while checking %s', gs_artifact_path,
-                        exc_info=True)
-        if not exists:
-          logging.info('%s does not exist.  Nothing to sign.',
-                       gs_artifact_path)
-          continue
+      dst_archive = '%s.%s' % (dst_name, suffix)
+      sect_general['archive'] = dst_archive
+      sect_general['type'] = image_type
 
-        input_insn_path = input_insns.GetInsnFile(image_type)
-        if not os.path.exists(input_insn_path):
-          logging.info('%s does not exist.  Nothing to sign.', input_insn_path)
-          continue
+      # In the default/automatic mode, only flag files for signing if the
+      # archives were actually uploaded in a previous stage. This additional
+      # check can be removed in future once |sign_types| becomes a required
+      # argument.
+      # TODO: Make |sign_types| a required argument.
+      gs_artifact_path = os.path.join(dst_path, dst_archive)
+      exists = False
+      try:
+        exists = ctx.Exists(gs_artifact_path)
+      except gs.GSContextException:
+        unknown_error[0] = True
+        logging.error('Unknown error while checking %s', gs_artifact_path,
+                      exc_info=True)
+      if not exists:
+        logging.info('%s does not exist.  Nothing to sign.',
+                     gs_artifact_path)
+        continue
 
-        # Generate the insn file for this artifact that the signer will use,
-        # and flag it for signing.
-        with tempfile.NamedTemporaryFile(
-            bufsize=0, prefix='pushimage.insns.') as insns_path:
-          input_insns.OutputInsns(image_type, insns_path.name, sect_insns,
-                                  sect_general)
+      first_image = True
+      for alt_insn_set in input_insns.GetAltInsnSets():
+        # Figure out which keysets have been requested for this type.
+        # We sort the forced set so tests/runtime behavior is stable.
+        keysets = sorted(force_keysets)
+        if not keysets:
+          keysets = input_insns.GetKeysets(insns_merge=alt_insn_set)
+          if not keysets:
+            logging.warning('Skipping %s image signing due to no keysets',
+                            image_type)
 
-          gs_insns_path = '%s/%s' % (dst_path, dst_name)
-          if keyset != keysets[0]:
-            gs_insns_path += '-%s' % keyset
-          gs_insns_path += '.instructions'
+        for keyset in keysets:
+          sect_insns['keyset'] = keyset
 
-          try:
-            ctx.Copy(insns_path.name, gs_insns_path)
-          except gs.GSContextException:
-            unknown_error[0] = True
-            logging.error('Unknown error while uploading insns %s',
-                          gs_insns_path, exc_info=True)
-            continue
+          # Generate the insn file for this artifact that the signer will use,
+          # and flag it for signing.
+          with tempfile.NamedTemporaryFile(
+              bufsize=0, prefix='pushimage.insns.') as insns_path:
+            input_insns.OutputInsns(insns_path.name, sect_insns, sect_general,
+                                    insns_merge=alt_insn_set)
 
-          try:
-            MarkImageToBeSigned(ctx, tbs_base, gs_insns_path, priority)
-          except gs.GSContextException:
-            unknown_error[0] = True
-            logging.error('Unknown error while marking for signing %s',
-                          gs_insns_path, exc_info=True)
-            continue
-          logging.info('Signing %s image %s', image_type, gs_insns_path)
-          instruction_urls.setdefault(channel, []).append(gs_insns_path)
+            gs_insns_path = '%s/%s' % (dst_path, dst_name)
+            if not first_image:
+              gs_insns_path += '-%s' % keyset
+            first_image = False
+            gs_insns_path += '.instructions'
+
+            try:
+              ctx.Copy(insns_path.name, gs_insns_path)
+            except gs.GSContextException:
+              unknown_error[0] = True
+              logging.error('Unknown error while uploading insns %s',
+                            gs_insns_path, exc_info=True)
+              continue
+
+            try:
+              MarkImageToBeSigned(ctx, tbs_base, gs_insns_path, priority)
+            except gs.GSContextException:
+              unknown_error[0] = True
+              logging.error('Unknown error while marking for signing %s',
+                            gs_insns_path, exc_info=True)
+              continue
+            logging.info('Signing %s image with keyset %s at %s', image_type,
+                         keyset, gs_insns_path)
+            instruction_urls.setdefault(channel, []).append(gs_insns_path)
 
   if unknown_error[0]:
     raise PushError('hit some unknown error(s)', instruction_urls)
diff --git a/scripts/pushimage_unittest.py b/scripts/pushimage_unittest.py
index ae6f6c8..a423143 100644
--- a/scripts/pushimage_unittest.py
+++ b/scripts/pushimage_unittest.py
@@ -62,8 +62,8 @@
   def testOutputInsnsBasic(self):
     """Verify output instructions are sane"""
     exp_content = """[insns]
-keyset = stumpy-mp-v3
 channel = dev canary
+keyset = stumpy-mp-v3
 chromeos_shell = false
 ensure_no_password = true
 firmware_update = true
@@ -74,8 +74,9 @@
 """
 
     insns = pushimage.InputInsns('test.board')
+    self.assertEqual(insns.GetAltInsnSets(), [None])
     m = self.PatchObject(osutils, 'WriteFile')
-    insns.OutputInsns('recovery', '/bogus', {}, {})
+    insns.OutputInsns('/bogus', {}, {})
     self.assertTrue(m.called)
     content = m.call_args_list[0][0][1]
     self.assertEqual(content.rstrip(), exp_content.rstrip())
@@ -83,8 +84,8 @@
   def testOutputInsnsReplacements(self):
     """Verify output instructions can be updated"""
     exp_content = """[insns]
-keyset = batman
 channel = dev
+keyset = batman
 chromeos_shell = false
 ensure_no_password = true
 firmware_update = true
@@ -106,11 +107,62 @@
 
     insns = pushimage.InputInsns('test.board')
     m = self.PatchObject(osutils, 'WriteFile')
-    insns.OutputInsns('recovery', '/a/file', sect_insns, sect_general)
+    insns.OutputInsns('/a/file', sect_insns, sect_general)
     self.assertTrue(m.called)
     content = m.call_args_list[0][0][1]
     self.assertEqual(content.rstrip(), exp_content.rstrip())
 
+  def testOutputInsnsMergeAlts(self):
+    """Verify handling of alternative insns.xxx sections"""
+    TEMPLATE_CONTENT = """[insns]
+channel = %(channel)s
+chromeos_shell = false
+ensure_no_password = true
+firmware_update = true
+security_checks = true
+create_nplusone = true
+override = sect_insns
+keyset = %(keyset)s
+%(extra)s
+[general]
+board = board
+config_board = test.board
+"""
+
+    exp_alts = ['insns.one', 'insns.two', 'insns.hotsoup']
+    exp_fields = {
+        'one': {'channel': 'dev canary', 'keyset': 'OneKeyset', 'extra': ''},
+        'two': {'channel': 'best', 'keyset': 'TwoKeyset', 'extra': ''},
+        'hotsoup': {
+            'channel': 'dev canary',
+            'keyset': 'ColdKeyset',
+            'extra': 'soup = cheddar\n',
+        },
+    }
+
+    # Make sure this overrides the insn sections.
+    sect_insns = {
+        'override': 'sect_insns',
+    }
+    sect_insns_copy = sect_insns.copy()
+    sect_general = {
+        'config_board': 'test.board',
+        'board': 'board',
+    }
+
+    insns = pushimage.InputInsns('test.multi')
+    self.assertEqual(insns.GetAltInsnSets(), exp_alts)
+    m = self.PatchObject(osutils, 'WriteFile')
+
+    for alt in exp_alts:
+      m.reset_mock()
+      insns.OutputInsns('/a/file', sect_insns, sect_general, insns_merge=alt)
+      self.assertEqual(sect_insns, sect_insns_copy)
+      self.assertTrue(m.called)
+      content = m.call_args_list[0][0][1]
+      exp_content = TEMPLATE_CONTENT % exp_fields[alt[6:]]
+      self.assertEqual(content.rstrip(), exp_content.rstrip())
+
 
 class MarkImageToBeSignedTest(gs_unittest.AbstractGSContextTest):
   """Tests for MarkImageToBeSigned()"""
@@ -233,7 +285,7 @@
     with mock.patch.object(gs.GSContext, 'Exists', return_value=True):
       urls = pushimage.PushImage('/src', 'test.board', 'R34-5126.0.0',
                                  sign_types=['recovery'])
-    self.assertEqual(self.gs_mock.call_count, 22)
+    self.assertEqual(self.gs_mock.call_count, 26)
     self.assertTrue(self.mark_mock.called)
     self.assertEqual(urls, EXPECTED)
 
@@ -251,7 +303,7 @@
     with mock.patch.object(gs.GSContext, 'Exists', return_value=True):
       urls = pushimage.PushImage('/src', 'test.board', 'R34-5126.0.0',
                                  sign_types=['base'])
-    self.assertEqual(self.gs_mock.call_count, 24)
+    self.assertEqual(self.gs_mock.call_count, 28)
     self.assertTrue(self.mark_mock.called)
     self.assertEqual(urls, EXPECTED)
 
@@ -259,7 +311,7 @@
     """Verify nothing is signed when we request an unavailable type"""
     urls = pushimage.PushImage('/src', 'test.board', 'R34-5126.0.0',
                                sign_types=['nononononono'])
-    self.assertEqual(self.gs_mock.call_count, 20)
+    self.assertEqual(self.gs_mock.call_count, 24)
     self.assertFalse(self.mark_mock.called)
     self.assertEqual(urls, {})
 
@@ -271,6 +323,55 @@
       self.assertRaises(pushimage.PushError, pushimage.PushImage, '/src',
                         'test.board', 'R34-5126.0.0')
 
+  def testMultipleKeysets(self):
+    """Verify behavior when processing an insn w/multiple keysets"""
+    EXPECTED = {
+        'canary': [
+            ('gs://chromeos-releases/canary-channel/test.board/5126.0.0/'
+             'ChromeOS-recovery-R34-5126.0.0-test.board.instructions'),
+            ('gs://chromeos-releases/canary-channel/test.board/5126.0.0/'
+             'ChromeOS-recovery-R34-5126.0.0-test.board-key2.instructions'),
+            ('gs://chromeos-releases/canary-channel/test.board/5126.0.0/'
+             'ChromeOS-recovery-R34-5126.0.0-test.board-key3.instructions'),
+        ],
+        'dev': [
+            ('gs://chromeos-releases/dev-channel/test.board/5126.0.0/'
+             'ChromeOS-recovery-R34-5126.0.0-test.board.instructions'),
+            ('gs://chromeos-releases/dev-channel/test.board/5126.0.0/'
+             'ChromeOS-recovery-R34-5126.0.0-test.board-key2.instructions'),
+            ('gs://chromeos-releases/dev-channel/test.board/5126.0.0/'
+             'ChromeOS-recovery-R34-5126.0.0-test.board-key3.instructions'),
+        ],
+    }
+    with mock.patch.object(gs.GSContext, 'Exists', return_value=True):
+      urls = pushimage.PushImage('/src', 'test.board', 'R34-5126.0.0',
+                                 force_keysets=('key1', 'key2', 'key3'))
+    self.assertEqual(urls, EXPECTED)
+
+  def testMultipleAltInsns(self):
+    """Verify behavior when processing an insn w/multiple insn overlays"""
+    EXPECTED = {
+        'canary': [
+            ('gs://chromeos-releases/canary-channel/test.multi/1.0.0/'
+             'ChromeOS-recovery-R1-1.0.0-test.multi.instructions'),
+            ('gs://chromeos-releases/canary-channel/test.multi/1.0.0/'
+             'ChromeOS-recovery-R1-1.0.0-test.multi-TwoKeyset.instructions'),
+            ('gs://chromeos-releases/canary-channel/test.multi/1.0.0/'
+             'ChromeOS-recovery-R1-1.0.0-test.multi-ColdKeyset.instructions'),
+        ],
+        'dev': [
+            ('gs://chromeos-releases/dev-channel/test.multi/1.0.0/'
+             'ChromeOS-recovery-R1-1.0.0-test.multi.instructions'),
+            ('gs://chromeos-releases/dev-channel/test.multi/1.0.0/'
+             'ChromeOS-recovery-R1-1.0.0-test.multi-TwoKeyset.instructions'),
+            ('gs://chromeos-releases/dev-channel/test.multi/1.0.0/'
+             'ChromeOS-recovery-R1-1.0.0-test.multi-ColdKeyset.instructions'),
+        ],
+    }
+    with mock.patch.object(gs.GSContext, 'Exists', return_value=True):
+      urls = pushimage.PushImage('/src', 'test.multi', 'R1-1.0.0')
+    self.assertEqual(urls, EXPECTED)
+
 
 class MainTests(cros_test_lib.MockTestCase):
   """Tests for main()"""
@@ -289,4 +390,4 @@
   signing.INPUT_INSN_DIR = signing.TEST_INPUT_INSN_DIR
 
   # Run the tests.
-  cros_test_lib.main(level='info', module=__name__)
+  cros_test_lib.main(level='notice', module=__name__)
diff --git a/scripts/summarize_build_stats.py b/scripts/summarize_build_stats.py
index 404a18d..a4a965a 100644
--- a/scripts/summarize_build_stats.py
+++ b/scripts/summarize_build_stats.py
@@ -7,6 +7,7 @@
 from __future__ import print_function
 
 import datetime
+import itertools
 import numpy
 import re
 import sys
@@ -14,6 +15,7 @@
 from chromite.cbuildbot import constants
 from chromite.lib import cidb
 from chromite.lib import commandline
+from chromite.lib import cros_build_lib
 from chromite.lib import cros_logging as logging
 
 
@@ -36,6 +38,8 @@
     self.blames = {}
     self.summary = {}
     self.builds_by_build_id = {}
+    self.slave_builds_by_master_id = {}
+    self.slave_builds_by_config = {}
 
   def GatherBuildAnnotations(self):
     """Gather the failure annotations for builds from cidb."""
@@ -158,6 +162,16 @@
     self.builds_by_build_id.update(
         {b['id'] : b for b in self.builds})
 
+    # Gather slave statuses for each of the master builds. For now this is a
+    # separate query per CQ run, but this could be consolidated to a single
+    # query if necessary (requires adding a cidb.py API method).
+    for bid in self.builds_by_build_id:
+      self.slave_builds_by_master_id[bid] = self.db.GetSlaveStatuses(bid)
+
+    self.slave_builds_by_config = cros_build_lib.GroupByKey(
+        itertools.chain(*self.slave_builds_by_master_id.values()),
+        'build_config')
+
   def _PrintCounts(self, reasons, fmt):
     """Print a sorted list of reasons in descending order of frequency.
 
@@ -199,7 +213,23 @@
       )
     return false_rejection_rate
 
-  def Summarize(self, build_type):
+  def GetBuildRunTimes(self, builds):
+    """Gets the elapsed run times of the completed builds within |builds|.
+
+    Args:
+      builds: Iterable of build statuses as returned by cidb.
+
+    Returns:
+      A list of the elapsed times (in seconds) of the builds that completed.
+    """
+    times = []
+    for b in builds:
+      if b['finish_time']:
+        td = (b['finish_time'] - b['start_time']).total_seconds()
+        times.append(td)
+    return times
+
+  def Summarize(self, build_type, bad_patch_candidates=False):
     """Process, print, and return a summary of statistics.
 
     As a side effect, save summary to self.summary.
@@ -208,11 +238,11 @@
       A dictionary summarizing the statistics.
     """
     if build_type == 'cq':
-      return self.SummarizeCQ()
+      return self.SummarizeCQ(bad_patch_candidates=bad_patch_candidates)
     else:
       return self.SummarizePFQ()
 
-  def SummarizeCQ(self):
+  def SummarizeCQ(self, bad_patch_candidates=False):
     """Process, print, and return a summary of cl action statistics.
 
     As a side effect, save summary to self.summary.
@@ -230,6 +260,8 @@
     else:
       logging.info('No runs included.')
 
+    build_times_sec = sorted(self.GetBuildRunTimes(self.builds))
+
     build_reason_counts = {}
     for reasons in self.reasons.values():
       for reason in reasons:
@@ -237,8 +269,11 @@
           build_reason_counts[reason] = build_reason_counts.get(reason, 0) + 1
 
     unique_blames = set()
+    build_blame_counts = {}
     for blames in self.blames.itervalues():
       unique_blames.update(blames)
+      for blame in blames:
+        build_blame_counts[blame] = build_blame_counts.get(blame, 0) + 1
     unique_cl_blames = {blame for blame in unique_blames if
                         EXTERNAL_CL_BASE_URL in blame}
 
@@ -290,6 +325,23 @@
       for x in range(max(rejection_counts) + 1):
         good_patch_rejection_breakdown.append((x, rejection_counts.count(x)))
 
+    # For CQ runs that passed, track which slave was the long pole, i.e. the
+    # last to finish.
+    long_pole_slave_counts = {}
+    for bid, master_build in self.builds_by_build_id.items():
+      if master_build['status'] == constants.BUILDER_STATUS_PASSED:
+        if not self.slave_builds_by_master_id[bid]:
+          continue
+        # TODO(akeshet): The set of slaves also includes non-important slaves
+        # (there is no distinction in cidb between important and non-important).
+        # To protect max(...) from hitting any None values we need the if check
+        # below. Revisit this once we can filter out non-important slaves.
+        _, long_config = max((slave['finish_time'], slave['build_config'])
+                             for slave in self.slave_builds_by_master_id[bid]
+                             if slave['finish_time'])
+        long_pole_slave_counts[long_config] = (
+            long_pole_slave_counts.get(long_config, 0) + 1)
+
     summary = {
         'total_cl_actions': len(self.claction_history),
         'unique_cls': len(self.claction_history.affected_cls),
@@ -306,6 +358,7 @@
         'patch_handling_time': patch_handle_times,
         'bad_cl_candidates': bad_cl_candidates,
         'unique_blames_change_count': len(unique_cl_blames),
+        'long_pole_slave_counts': long_pole_slave_counts,
     }
 
     logging.info('CQ committed %s changes', summary['submitted_patches'])
@@ -388,11 +441,19 @@
                  numpy.percentile(cq_handle_times, 90) / 3600.0)
     logging.info('')
 
+    # Log some statistics about cq-master run-time.
+    logging.info('CQ-master run time:')
+    logging.info('  50th percentile: %.2f hours',
+                 numpy.percentile(build_times_sec, 50) / 3600.0)
+    logging.info('  90th percentile: %.2f hours',
+                 numpy.percentile(build_times_sec, 90) / 3600.0)
+
     for bot_type, patches in summary['bad_cl_candidates'].items():
       logging.info('%d bad patch candidates were rejected by the %s',
                    len(patches), bot_type)
-      for k in patches:
-        logging.info('Bad patch candidate in: %s', k)
+      if bad_patch_candidates:
+        for k in patches:
+          logging.info('Bad patch candidate in: %s', k)
 
     fmt_fai = '  %(cnt)d failures in %(reason)s'
     fmt_rej = '  %(cnt)d rejections due to %(reason)s'
@@ -406,6 +467,21 @@
     logging.info('Reasons why builds failed:')
     self._PrintCounts(build_reason_counts, fmt_fai)
 
+    logging.info('Bugs or CLs responsible for build failures:')
+    self._PrintCounts(build_blame_counts, fmt_fai)
+
+    total_counts = sum(long_pole_slave_counts.values())
+    logging.info('Slowest CQ slaves out of %s passing runs:', total_counts)
+    for (count, config) in sorted(
+        (v, k) for (k, v) in long_pole_slave_counts.items()):
+      if count < (total_counts / 20.0):
+        continue
+      build_times = self.GetBuildRunTimes(self.slave_builds_by_config[config])
+      logging.info('%s times the slowest slave was %s', count, config)
+      logging.info('  50th percentile: %.2f hours, 90th percentile: %.2f hours',
+                   numpy.percentile(build_times, 50) / 3600.0,
+                   numpy.percentile(build_times, 90) / 3600.0)
+
     return summary
 
   # TODO(akeshet): some of this logic is copied directly from SummarizeCQ.
@@ -490,6 +566,10 @@
   parser.add_argument('--build-type', choices=['cq', 'chrome-pfq'],
                       default='cq',
                       help='Build type to summarize. Default: cq.')
+  parser.add_argument('--bad-patch-candidates', action='store_true',
+                      default=False,
+                      help='In CQ mode, whether to print bad patch '
+                           'candidates.')
   return parser
 
 
@@ -527,4 +607,5 @@
   cl_stats_engine = CLStatsEngine(db)
   cl_stats_engine.Gather(start_date, end_date, master_config,
                          starting_build_number=options.starting_build)
-  cl_stats_engine.Summarize(options.build_type)
+  cl_stats_engine.Summarize(options.build_type,
+                            options.bad_patch_candidates)
diff --git a/scripts/summarize_build_stats_unittest.py b/scripts/summarize_build_stats_unittest.py
index c6a417b..f6442af 100644
--- a/scripts/summarize_build_stats_unittest.py
+++ b/scripts/summarize_build_stats_unittest.py
@@ -177,6 +177,7 @@
           'good_patch_rejection_count': {CQ: 1, PRE_CQ: 1},
           'good_patch_rejections': 2,
           'false_rejection_rate': {CQ: 20., PRE_CQ: 20., 'combined': 100. / 3},
+          'long_pole_slave_counts': {},
           'submitted_patches': 4,
           'submit_fails': 0,
           'unique_cls': 4,
diff --git a/scripts/upload_symbols.py b/scripts/upload_symbols.py
index 2a912a2..8e94e80 100644
--- a/scripts/upload_symbols.py
+++ b/scripts/upload_symbols.py
@@ -91,8 +91,8 @@
 
 # The unique namespace in the dedupe server that only we use.  Helps avoid
 # collisions with all the hashed values and unrelated content.
-OFFICIAL_DEDUPE_NAMESPACE = 'chromium-os-upload-symbols'
-STAGING_DEDUPE_NAMESPACE = '%s-staging' % OFFICIAL_DEDUPE_NAMESPACE
+OFFICIAL_DEDUPE_NAMESPACE_TMPL = '%s-upload-symbols'
+STAGING_DEDUPE_NAMESPACE_TMPL = '%s-staging' % OFFICIAL_DEDUPE_NAMESPACE_TMPL
 
 
 # The minimum average rate (in bytes per second) that we expect to maintain
@@ -167,7 +167,7 @@
   return max(os.path.getsize(path) / UPLOAD_MIN_RATE, UPLOAD_MIN_TIMEOUT)
 
 
-def SymUpload(upload_url, sym_item):
+def SymUpload(upload_url, sym_item, product_name):
   """Upload a symbol file to a HTTP server
 
   The upload is a multipart/form-data POST with the following parameters:
@@ -186,6 +186,7 @@
   Args:
     upload_url: The crash URL to POST the |sym_file| to
     sym_item: A SymbolItem containing the path to the breakpad symbol to upload
+    product_name: A string for stats purposes. Usually 'ChromeOS' or 'Android'.
   """
   sym_header = sym_item.sym_header
   sym_file = sym_item.sym_file
@@ -202,7 +203,7 @@
       # Not sure what to set for the version.  Maybe the git sha1 of this file.
       # Note: the server restricts this to 30 chars.
       #('version', None),
-      ('product', 'ChromeOS'),
+      ('product', product_name),
       ('os', sym_header.os),
       ('cpu', sym_header.cpu),
       poster.encode.MultipartParam.from_file('symbol_file', sym_file),
@@ -214,7 +215,7 @@
   urllib2.urlopen(request, timeout=GetUploadTimeout(sym_file))
 
 
-def TestingSymUpload(upload_url, sym_item):
+def TestingSymUpload(upload_url, sym_item, _product_name):
   """A stub version of SymUpload for --testing usage"""
   cmd = ['sym_upload', sym_item.sym_file, upload_url]
   # Randomly fail 80% of the time (the retry logic makes this 80%/3 per file).
@@ -277,9 +278,9 @@
     _Update()
 
 
-def UploadSymbol(upload_url, symbol_element, file_limit=DEFAULT_FILE_LIMIT,
-                 sleep=0, num_errors=None, watermark_errors=None,
-                 failed_queue=None, passed_queue=None):
+def UploadSymbol(upload_url, symbol_element, product_name,
+                 file_limit=DEFAULT_FILE_LIMIT, sleep=0, num_errors=None,
+                 watermark_errors=None, failed_queue=None, passed_queue=None):
   """Upload |sym_element.symbol_item| to |upload_url|
 
   Args:
@@ -289,6 +290,7 @@
                     to upload. symbol_element.opaque_push_state is an object of
                     _IsolateServerPushState or None if the item doesn't have
                     a push state.
+    product_name: A string for stats purposes. Usually 'ChromeOS' or 'Android'.
     file_limit: The max file size of a symbol file before we try to strip it
     sleep: Number of seconds to sleep before running
     num_errors: An object to update with the error count (needs a .value member)
@@ -349,7 +351,7 @@
       cros_build_lib.TimedCommand(
           retry_util.RetryException,
           (urllib2.HTTPError, urllib2.URLError), MAX_RETRIES, SymUpload,
-          upload_url, upload_item, sleep=INITIAL_RETRY_DELAY,
+          upload_url, upload_item, product_name, sleep=INITIAL_RETRY_DELAY,
           timed_log_msg=('upload of %10i bytes took %%(delta)s: %s' %
                          (file_size, os.path.basename(sym_file))))
       success = True
@@ -573,7 +575,8 @@
 def UploadSymbols(board=None, official=False, server=None, breakpad_dir=None,
                   file_limit=DEFAULT_FILE_LIMIT, sleep=DEFAULT_SLEEP_DELAY,
                   upload_limit=None, sym_paths=None, failed_list=None,
-                  root=None, retry=True, dedupe_namespace=None):
+                  root=None, retry=True, dedupe_namespace=None,
+                  product_name='ChromeOS'):
   """Upload all the generated symbols for |board| to the crash server
 
   You can use in a few ways:
@@ -596,6 +599,7 @@
     root: The tree to prefix to |breakpad_dir| (if |breakpad_dir| is not set)
     retry: Whether we should retry failures.
     dedupe_namespace: The isolateserver namespace to dedupe uploaded symbols.
+    product_name: A string for stats purposes. Usually 'ChromeOS' or 'Android'.
 
   Returns:
     The number of errors that were encountered.
@@ -650,9 +654,10 @@
   watermark_errors = multiprocessing.Value('f')
   failed_queue = multiprocessing.Queue()
   uploader = functools.partial(
-      UploadSymbol, upload_url, file_limit=file_limit, sleep=sleep,
-      num_errors=bg_errors, watermark_errors=watermark_errors,
-      failed_queue=failed_queue, passed_queue=dedupe_queue)
+      UploadSymbol, upload_url, product_name=product_name,
+      file_limit=file_limit, sleep=sleep, num_errors=bg_errors,
+      watermark_errors=watermark_errors, failed_queue=failed_queue,
+      passed_queue=dedupe_queue)
 
   start_time = datetime.datetime.now()
   Counters = cros_build_lib.Collection(
@@ -804,7 +809,7 @@
   parser.add_argument('sym_paths', type='path_or_uri', nargs='*', default=None,
                       help='symbol file or directory or URL or tarball')
   parser.add_argument('--board', default=None,
-                      help='board to build packages for')
+                      help='Used to find default breakpad_root.')
   parser.add_argument('--breakpad_root', type='path', default=None,
                       help='full path to the breakpad symbol directory')
   parser.add_argument('--root', type='path', default=None,
@@ -828,20 +833,20 @@
                       help='run in testing mode')
   parser.add_argument('--yes', action='store_true', default=False,
                       help='answer yes to all prompts')
+  parser.add_argument('--product_name', type=str, default='ChromeOS',
+                      help='Product Name for breakpad stats.')
 
   opts = parser.parse_args(argv)
   opts.Freeze()
 
-  if opts.sym_paths:
+  if opts.sym_paths or opts.breakpad_root:
     if opts.regenerate:
-      cros_build_lib.Die('--regenerate may not be used with specific files')
+      cros_build_lib.Die('--regenerate may not be used with specific files, '
+                         'or breakpad_root')
   else:
     if opts.board is None:
       cros_build_lib.Die('--board is required')
 
-  if opts.breakpad_root and opts.regenerate:
-    cros_build_lib.Die('--regenerate may not be used with --breakpad_root')
-
   if opts.testing:
     # TODO(build): Kill off --testing mode once unittests are up-to-snuff.
     logging.info('running in testing mode')
@@ -853,9 +858,9 @@
   dedupe_namespace = None
   if opts.dedupe:
     if opts.official_build and not opts.testing:
-      dedupe_namespace = OFFICIAL_DEDUPE_NAMESPACE
+      dedupe_namespace = OFFICIAL_DEDUPE_NAMESPACE_TMPL % opts.product_name
     else:
-      dedupe_namespace = STAGING_DEDUPE_NAMESPACE
+      dedupe_namespace = STAGING_DEDUPE_NAMESPACE_TMPL % opts.product_name
 
   if not opts.yes:
     prolog = '\n'.join(textwrap.wrap(textwrap.dedent("""
@@ -880,7 +885,8 @@
                        file_limit=opts.strip_cfi, sleep=DEFAULT_SLEEP_DELAY,
                        upload_limit=opts.upload_limit, sym_paths=opts.sym_paths,
                        failed_list=opts.failed_list, root=opts.root,
-                       dedupe_namespace=dedupe_namespace)
+                       dedupe_namespace=dedupe_namespace,
+                       product_name=opts.product_name)
   if ret:
     logging.error('encountered %i problem(s)', ret)
     # Since exit(status) gets masked, clamp it to 1 so we don't inadvertently
diff --git a/scripts/upload_symbols_unittest.py b/scripts/upload_symbols_unittest.py
index c8f3eb4..62dd0d9 100644
--- a/scripts/upload_symbols_unittest.py
+++ b/scripts/upload_symbols_unittest.py
@@ -380,9 +380,9 @@
     """Verify we try to upload on a normal file"""
     osutils.Touch(self.sym_file)
     sym_element = upload_symbols.SymbolElement(self.sym_item, None)
-    ret = upload_symbols.UploadSymbol(self.url, sym_element)
+    ret = upload_symbols.UploadSymbol(self.url, sym_element, 'TestProduct')
     self.assertEqual(ret, 0)
-    self.upload_mock.assert_called_with(self.url, self.sym_item)
+    self.upload_mock.assert_called_with(self.url, self.sym_item, 'TestProduct')
     self.assertEqual(self.upload_mock.call_count, 1)
 
   def testUploadSymbolErrorCountExceeded(self):
@@ -390,8 +390,8 @@
     errors = ctypes.c_int(10000)
     # Pass in garbage values so that we crash if num_errors isn't handled.
     ret = upload_symbols.UploadSymbol(
-        None, upload_symbols.SymbolElement(self.sym_item, None), sleep=None,
-        num_errors=errors)
+        None, upload_symbols.SymbolElement(self.sym_item, None), 'TestProduct',
+        sleep=None, num_errors=errors)
     self.assertEqual(ret, 0)
 
   def testUploadRetryErrors(self, side_effect=None):
@@ -402,9 +402,10 @@
     errors = ctypes.c_int()
     item = upload_symbols.FakeItem(sym_file='/dev/null')
     element = upload_symbols.SymbolElement(item, None)
-    ret = upload_symbols.UploadSymbol(self.url, element, num_errors=errors)
+    ret = upload_symbols.UploadSymbol(self.url, element, 'TestProduct',
+                                      num_errors=errors)
     self.assertEqual(ret, 1)
-    self.upload_mock.assert_called_with(self.url, item)
+    self.upload_mock.assert_called_with(self.url, item, 'TestProduct')
     self.assertTrue(self.upload_mock.call_count >= upload_symbols.MAX_RETRIES)
 
   def testConnectRetryErrors(self):
@@ -426,7 +427,7 @@
     osutils.WriteFile(self.sym_file, content)
     ret = upload_symbols.UploadSymbol(
         self.url, upload_symbols.SymbolElement(self.sym_item, None),
-        file_limit=1)
+        'TestProduct', file_limit=1)
     self.assertEqual(ret, 0)
     # Make sure the item passed to the upload has a temp file and not the
     # original -- only the temp one has been stripped down.
@@ -443,7 +444,8 @@
       f.write('STACK CFI 1234\n\n')
     ret = upload_symbols.UploadSymbol(
         self.url,
-        upload_symbols.SymbolElement(self.sym_item, None))
+        upload_symbols.SymbolElement(self.sym_item, None),
+        'TestProduct')
     self.assertEqual(ret, 0)
     # Make sure the item passed to the upload has a temp file and not the
     # original -- only the temp one has been truncated.
@@ -468,7 +470,7 @@
   def testPostUpload(self):
     """Verify HTTP POST has all the fields we need"""
     m = self.PatchObject(urllib2, 'urlopen', autospec=True)
-    upload_symbols.SymUpload(self.SYM_URL, self.sym_item)
+    upload_symbols.SymUpload(self.SYM_URL, self.sym_item, 'TestProduct')
     self.assertEquals(m.call_count, 1)
     req = m.call_args[0][0]
     self.assertEquals(req.get_full_url(), self.SYM_URL)
@@ -503,7 +505,7 @@
         (50 * 1024 * 1024, 257),
     )
     for size.return_value, timeout in tests:
-      upload_symbols.SymUpload(self.SYM_URL, self.sym_item)
+      upload_symbols.SymUpload(self.SYM_URL, self.sym_item, 'TestProduct')
       self.assertEqual(m.call_args[1]['timeout'], timeout)
 
 
diff --git a/scripts/wrapper.py b/scripts/wrapper.py
index 2715ad8..8153a85 100755
--- a/scripts/wrapper.py
+++ b/scripts/wrapper.py
@@ -12,6 +12,7 @@
 
 from __future__ import print_function
 
+import imp
 import os
 import sys
 
@@ -96,6 +97,8 @@
   $ ln -s $PWD/cbuildbot/cbuildbot ~/bin/; cbuildbot --help
   # No $PATH needed, but a relative symlink to a symlink to the chromite dir.
   $ cd ~; ln -s bin/cbuildbot ./; ./cbuildbot --help
+  # External chromite module
+  $ ln -s ../chromite/scripts/wrapper.py foo; ./foo
 
   Args:
     target: Path to the script we're trying to run.
@@ -103,6 +106,8 @@
   Returns:
     The module main functor.
   """
+  # We assume/require the script we're wrapping ends in a .py.
+  full_path = target + '.py'
   while True:
     # Walk back one symlink at a time until we get into the chromite dir.
     parent, base = os.path.split(target)
@@ -111,21 +116,38 @@
       target = base
       break
     target = os.path.join(os.path.dirname(target), os.readlink(target))
-  assert parent.startswith(CHROMITE_PATH), (
-      'could not figure out leading path\n'
-      '\tparent: %s\n'
-      '\tCHROMITE_PATH: %s' % (parent, CHROMITE_PATH))
-  parent = parent[len(CHROMITE_PATH):].split(os.sep)
-  target = ['chromite'] + parent + [target]
 
-  if target[-2] == 'bin':
-    # Convert <path>/bin/foo -> <path>/scripts/foo.
-    target[-2] = 'scripts'
-  elif target[1] == 'bootstrap' and len(target) == 3:
-    # Convert <git_repo>/bootstrap/foo -> <git_repo>/bootstrap/scripts/foo.
-    target.insert(2, 'scripts')
+  # If we walked all the way back to wrapper.py, it means we're trying to run
+  # an external module.  So we have to import it by filepath and not via the
+  # chromite.xxx.yyy namespace.
+  if target != 'wrapper.py':
+    assert parent.startswith(CHROMITE_PATH), (
+        'could not figure out leading path\n'
+        '\tparent: %s\n'
+        '\tCHROMITE_PATH: %s' % (parent, CHROMITE_PATH))
+    parent = parent[len(CHROMITE_PATH):].split(os.sep)
+    target = ['chromite'] + parent + [target]
 
-  module = cros_import.ImportModule(target)
+    if target[-2] == 'bin':
+      # Convert <path>/bin/foo -> <path>/scripts/foo.
+      target[-2] = 'scripts'
+    elif target[1] == 'bootstrap' and len(target) == 3:
+      # Convert <git_repo>/bootstrap/foo -> <git_repo>/bootstrap/scripts/foo.
+      target.insert(2, 'scripts')
+
+    try:
+      module = cros_import.ImportModule(target)
+    except ImportError as e:
+      print('%s: could not import chromite module: %s: %s'
+            % (sys.argv[0], full_path, e), file=sys.stderr)
+      sys.exit(1)
+  else:
+    try:
+      module = imp.load_source('main', full_path)
+    except IOError as e:
+      print('%s: could not import external module: %s: %s'
+            % (sys.argv[0], full_path, e), file=sys.stderr)
+      sys.exit(1)
 
   # Run the module's main func if it has one.
   main = getattr(module, 'main', None)
diff --git a/signing/signer_instructions/README b/signing/signer_instructions/README
new file mode 100644
index 0000000..e7f90f1
--- /dev/null
+++ b/signing/signer_instructions/README
@@ -0,0 +1,76 @@
+=== PREFACE ===
+NOTE: The files in chromite/ are currently only used for testing.  The actual
+files used by releases live in crostools/signer_instructions/.  The program
+managers would prefer to keep them internal for now.
+
+=== OVERVIEW ===
+This directory holds instruction files that are used when uploading files for
+signing with official keys.  The pushimage script will process them to create
+output instruction files which are then posted to a Google Storage bucket that
+the signing processes watch.  The input files tell pushimage how to operate,
+and output files tell the signer how to operate.
+
+This file covers things that pushimage itself cares about.  It does not get into
+the fields that the signer utilizes.  See REFERENCES below for that.
+
+=== FILES ===
+DEFAULT.instructions - default values for all boards/artifacts; loaded first
+DEFAULT.$TYPE.instructions - default values for all boards for a specific type
+$BOARD.instructions - default values for all artifacts for $BOARD, and used for
+                      recovery images
+$BOARD.$TYPE.instructions - values specific to a board and artifact type; see
+                            the --sign-types argument to pushimage
+
+=== FORMAT ===
+There are a few main sections that pushimage cares about:
+[insns]
+[insns.XXX]  (Where XXX can be anything)
+[general]
+
+Other sections are passed through to the signer untouched, and many fields in
+the above sections are also unmodified.
+
+The keys that pushimage looks at are:
+[insns]
+channels = comma/space delimited list of the channels to flag for signing
+keysets = comma/space delimited list of the keysets to use when signing
+
+A bunch of fields will also be clobbered in the [general] section as pushimage
+writes out metadata based on the command line flags/artifacts.
+
+=== MULTI CHANNEL/KEYSET ===
+When you want to sign a single board/artifact type for multiple channels or
+keysets, simply list them in insns.channels and insns.keysets.  The pushimage
+script will take care of posting to the right subdirs and creating unique
+filenames based on those.
+
+=== MULTI INPUTS ===
+When you want to sign multiple artifacts for a single board (and all the same
+artifact type), you need to use the multiple input form instead.  When you
+create multiple sections that start with "insns.", pushimage will overlay that
+on top of the insns section, and then produce multiple output requests.
+
+So if you wrote a file like:
+  [insns]
+  channel = dev
+  [insns.one]
+  keyset = Zinger
+  input_files = zinger/ec.bin
+  [insns.two]
+  keyset = Hoho
+  input_files = hoho/ec.bin
+
+Pushimage will produce two requests for the signer:
+  [insns]
+  channel = dev
+  keyset = Zinger
+  input_files = zinger/ec.bin
+And:
+  [insns]
+  channel = dev
+  keyset = Hoho
+  input_files = hoho/ec.bin
+
+=== REFERENCES ===
+For details on the fields that the signer uses:
+https://sites.google.com/a/google.com/chromeos/resources/engineering/releng/signer-documentation
diff --git a/signing/signer_instructions/test.multi.instructions b/signing/signer_instructions/test.multi.instructions
new file mode 100644
index 0000000..2ac94a1
--- /dev/null
+++ b/signing/signer_instructions/test.multi.instructions
@@ -0,0 +1,20 @@
+[insns]
+channel = dev canary
+chromeos_shell = false
+ensure_no_password = true
+firmware_update = true
+security_checks = true
+create_nplusone = true
+override = base
+
+[insns.one]
+keyset = OneKeyset
+override = alt
+
+[insns.two]
+keyset = TwoKeyset
+channel = best
+
+[insns.hotsoup]
+keyset = ColdKeyset
+soup = cheddar