Merge to upstream 7c8181bedebe0edbd43c5d14c760f0011d8b3fcc.
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..af1b7ac
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,17 @@
+# Generated files
+*.pyc
+*~
+*.swp
+buildbot/cbuildbot.log
+buildbot/revisions.pfq
+.project
+.pydevproject
+buildbot/.completed_stages
+
+# source cross-reference files.
+tags
+.ctags
+cscope.out
+
+# Directory that might hold site config checkout.
+config/
diff --git a/AUTHORS b/AUTHORS
new file mode 100644
index 0000000..563c2ec
--- /dev/null
+++ b/AUTHORS
@@ -0,0 +1,6 @@
+# Names should be added to this file like so:
+# Name or Organization <email address>
+
+Google Inc. <opensource@google.com>
+Code Aurora Forum, Inc. <opensource@codeaurora.org>
+NVIDIA <chromium-os@nvidia.com>
diff --git a/COMMIT-QUEUE.ini b/COMMIT-QUEUE.ini
new file mode 100644
index 0000000..baecb5b
--- /dev/null
+++ b/COMMIT-QUEUE.ini
@@ -0,0 +1,11 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Per-project Commit Queue settings.
+# Documentation: http://goo.gl/5J7oND
+
+[GENERAL]
+
+# Run the default configs plus the binhost-pre-cq.
+pre-cq-configs: default binhost-pre-cq
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..0aa7fc9
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,27 @@
+// Copyright (c) 2006-2009 The Chromium OS Authors. All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//    * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//    * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//    * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/OWNERS b/OWNERS
new file mode 100644
index 0000000..0875a84
--- /dev/null
+++ b/OWNERS
@@ -0,0 +1,2 @@
+davidjames@chromium.org
+vapier@chromium.org
diff --git a/PRESUBMIT.cfg b/PRESUBMIT.cfg
new file mode 100644
index 0000000..a38497e
--- /dev/null
+++ b/PRESUBMIT.cfg
@@ -0,0 +1,3 @@
+[Hook Scripts]
+hook0=bin/cros lint ${PRESUBMIT_FILES}
+hook1=bin/preupload_dump_config
diff --git a/README.chromium b/README.chromium
new file mode 100644
index 0000000..7f2bad0
--- /dev/null
+++ b/README.chromium
@@ -0,0 +1,12 @@
+Name: chromite
+Short Name: chromite
+URL: https://chromium.googlesource.com/chromiumos/chromite
+Version: 0.0.2
+License: BSD
+License File: LICENSE
+Security Critical: no
+
+Description:
+This contains scripts used to build Chromium for Chromium OS
+('cros chrome-sdk'), as well as interact with the Chromium OS
+build system.
diff --git a/__init__.py b/__init__.py
new file mode 100644
index 0000000..324d57a
--- /dev/null
+++ b/__init__.py
@@ -0,0 +1,38 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+
+import os
+import sys
+
+# Add the third_party/ dir to our search path so that we can find the
+# modules in there automatically.  This isn't normal, so don't replicate
+# this pattern elsewhere.
+_chromite_dir = os.path.normpath(os.path.dirname(os.path.realpath(__file__)))
+_containing_dir = os.path.dirname(_chromite_dir)
+_third_party_dirs = [os.path.join(_chromite_dir, 'third_party')]
+# If chromite is living inside the Chrome checkout under
+# <chrome_root>/src/third_party/chromite, its dependencies will be checked out
+# to <chrome_root>/src/third_party instead of the normal chromite/third_party
+# location due to git-submodule limitations (a submodule cannot be contained
+# inside another submodule's workspace), so we want to add that to the
+# search path.
+if os.path.basename(_containing_dir) == 'third_party':
+  _third_party_dirs.append(_containing_dir)
+
+# List of third_party packages that might need subpaths added to search.
+_paths = [
+    'dpkt',
+    os.path.join('gdata', 'src'),
+    'pyelftools',
+    'swarming.client',
+]
+
+for _path in _paths:
+  for _third_party in _third_party_dirs[:]:
+    _component = os.path.join(_third_party, _path)
+    if os.path.isdir(_component):
+      _third_party_dirs.append(_component)
+sys.path = _third_party_dirs + sys.path
diff --git a/appengine/.gitignore b/appengine/.gitignore
new file mode 100644
index 0000000..b65f7ad
--- /dev/null
+++ b/appengine/.gitignore
@@ -0,0 +1,2 @@
+/google_appengine
+/google_appengine_*
diff --git a/appengine/.testignore b/appengine/.testignore
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/appengine/.testignore
diff --git a/appengine/README b/appengine/README
new file mode 100644
index 0000000..38fbc14
--- /dev/null
+++ b/appengine/README
@@ -0,0 +1,21 @@
+AppEngine supports Python 2.5 and 2.7:
+  https://developers.google.com/appengine/docs/python/
+
+A utility script dev_appserver is in this directory to automatically
+download the sdk and call ./google_appengine/dev_appserver.py with
+arguments for serving at <yourhostname>:8080 (which can then be
+accessed from other machines on intranet).  See contents of script
+for details on what it does (it is very short).
+
+Examples based on project name chromiumos-build-stats.
+
+To start the dev server on <yourhostname>:8080 do this:
+%> ./dev_appserver chromiumos-build-stats/app.yaml
+
+To clear DB before starting:
+%> ./dev_appserver -c chromiumos-build-stats/app.yaml
+
+To update on chromiumos-build-stats.appspot.com (WHEN READY):
+%> ./google_appengine/appcfg.py update chromiumos-build-stats/
+
+See cq_stats/README to learn about how to work with that zapping good app!
diff --git a/appengine/ae_pylint b/appengine/ae_pylint
new file mode 100755
index 0000000..6557f26
--- /dev/null
+++ b/appengine/ae_pylint
@@ -0,0 +1,18 @@
+#!/bin/bash
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This is a simple wrapper around pylint that allows imports to work.
+# The import path is unusual because these python files normally run
+# as an AppEngine instance, so the import paths are set up to find
+# libraries when run there.  They are not set up to find libraries
+# rooted at 'chromite'.
+
+# Run in appengine project directory.  Example:
+# cd chromiumos-build-stats ; ../ae_pylint main.py
+# Or run from this directory.  Example:
+# ./ae_pylint chromiumos-build-stats/main.py
+
+ROOT=$(realpath $(dirname $0))
+PYTHONPATH="${ROOT}"/google_appengine pylint --rcfile="${ROOT}"/../pylintrc "$@"
diff --git a/appengine/ae_shell b/appengine/ae_shell
new file mode 100755
index 0000000..24df18f
--- /dev/null
+++ b/appengine/ae_shell
@@ -0,0 +1,202 @@
+#!/bin/bash
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+APP_YAML="app.yaml"
+DEFAULT_SDK_MIRROR="https://storage.googleapis.com/appengine-sdks/featured/google_appengine_1.9.19.zip"
+# Apps can further modify the appengine sdk by providing this shell script in
+# their top level directory. This is needed because the turnaround time to
+# submitting patches upstream to the SDK is rather large.
+# WARNING: Remember that this only changes the local installation of the SDK.
+# So, this is only useful to fix bugs that make local development hard. AE
+# will use a non-patched version of the SDK.
+# The script will be run as:
+#   sdk_mod <absolute/path/to/sdk>
+APPENGINE_SDK_MOD_FILE="appengine_sdk_mod"
+
+PYTHONPATH_PREFIX=""
+PATH_PREFIX=""
+PS1_PREFIX=""
+
+usage() {
+  cat << EOF
+Usage: ${BASH_SOURCE} <app_dir>
+
+Use this script to enter an environment to develop an appengine app in.
+This script will:
+  - Download the requested version of SDK if it's not already available.
+  - Set up the environment in the new shell so that relevant SDK and project
+    tools are available, and PYTHONPATH is setup to use these tools.
+
+You can create some files under your toplevel directory to modify the
+behaviour of this script for your project:
+  - appengine_sdk_mod: A bash script that will be executed by this script as:
+        ./fancy_project/appengine_sdk_mod <absolute/path/to/AE/SDK>
+        This script can be used to modify the *local installation only* of the
+        SDK. This can, for example, fixup the SDK to ease local development.
+        For an example, see cq_stats/appengine_sdk_mod.
+EOF
+}
+
+enter_ae_shell() {
+  local rcfile="$(mktemp)"
+
+  cat >"${rcfile}" << EOF
+[[ -e ~/.bashrc ]] && source ~/.bashrc
+
+export PYTHONPATH="${PYTHONPATH_PREFIX}:\${PYTHONPATH}"
+export PATH="${PATH_PREFIX}:\${PATH}"
+export PS1="${PS1_PREFIX} \${PS1}"
+
+# Clear BASH_ENV so that if a subshell is launched, we don't
+# get sourced twice. (This file is going to disappear after the first time it's
+# sourced.)
+unset BASH_ENV
+rm -f "${rcfile}"
+EOF
+
+  info "Entering ae_shell for ${appname}..."
+  if [[ $# -eq 0 ]]; then
+    # Enter a shell that will survive successful completion of this script, and
+    # will have the new environment setup for the user.
+    exec bash --rcfile "${rcfile}" -i
+  else
+    # A command was given, run that command in the new shell.
+    # bash will ignore BASH_ENV if it detects that it's launched by sshd.
+    # Trick it!
+    unset SSH_CLIENT
+    unset SSH_CONNECTION
+    unset SSH_TTY
+    BASH_ENV=${rcfile} exec bash -c '"$@"' "$@"
+  fi
+}
+
+prepare_sdk() {
+  local -r appengine_dir="$1"
+  local -r ae_sdk_dir="$2"
+  local -r appname="$3"
+
+  if [[ ! -d "${ae_sdk_dir}" ]]; then
+    local temp_ae_sdk_dir="temp_ae_sdk_dir"
+
+    info "Using appengine SDK mirror ${DEFAULT_SDK_MIRROR}"
+
+    rm -rf "${temp_ae_sdk_dir}"
+    mkdir -p "${temp_ae_sdk_dir}"
+    info "Downloading appengine SDK"
+    local sdk_zip="${temp_ae_sdk_dir}/sdk.zip"
+    wget -c "${DEFAULT_SDK_MIRROR}" -O "${sdk_zip}"
+    if [[ $? -ne 0 ]]; then
+      error "Failed to download SDK from ${DEFAULT_SDK_MIRROR}"
+      rm -rf "${temp_ae_sdk_dir}"
+      return ${E_GENERAL}
+    fi
+
+    info "Unpacking..."
+    unzip -q "${sdk_zip}" -d "${temp_ae_sdk_dir}"
+    if [[ $? -ne 0 ]]; then
+      error "Failed to unzip ${sdk_zip}."
+      rm -rf "${temp_ae_sdk_dir}"
+      return ${E_GENERAL}
+    fi
+
+    mv "${temp_ae_sdk_dir}/google_appengine" "${ae_sdk_dir}"
+    rm -rf "${temp_ae_sdk_dir}"
+
+    if [[ -f "${appname}/${APPENGINE_SDK_MOD_FILE}" ]]; then
+      info "Running appengine sdk mod script from " \
+          "${appname}/${APPENGINE_SDK_MOD_FILE}"
+      if ! "./${appname}/${APPENGINE_SDK_MOD_FILE}" \
+          "${appengine_dir}/${ae_sdk_dir}"; then
+        return ${E_GENERAL}
+      fi
+    fi
+  fi
+
+  info "Using appengine SDK at ${ae_sdk_dir}"
+  return 0
+}
+
+setup_django_path() {
+  local -r appengine_dir="$1"
+  local -r ae_sdk_dir="$2"
+  local -r appname="$3"
+
+  if [[ ! -f "${appname}/${APP_YAML}" ]]; then
+    return ${E_GENERAL}
+  fi
+
+  local django_version
+  django_version="$(awk '$0 == "- name: django" { getline; print $NF }' \
+                    "${appname}/${APP_YAML}")"
+  if [[ -z "${django_version}" ]]; then
+    return ${E_GENERAL}
+  fi
+
+  info "Setting django version to ${django_version}"
+  django_dir="${ae_sdk_dir}/lib/django-${django_version}"
+  PYTHONPATH_PREFIX="${appengine_dir}/${django_dir}:${PYTHONPATH_PREFIX}"
+  PATH_PREFIX="${appengine_dir}/${django_dir}/django/bin:${PATH_PREFIX}"
+}
+
+# This sets up the chromite path so that chromite is available inside ae_shell.
+# Note that this is different from using chromite/scripts/wrapper.py because the
+# appengine apps that launched / deployed inside the ae_shell run in an
+# environment controlled by the AE SDK's dev_appserver.py
+# This ensures that chromite is available inside that environment as well.
+setup_chromite_path() {
+  local -r appengine_dir="$1"
+  # Must go deeper.
+  local base_dir
+  base_dir="$(dirname "$(dirname "${appengine_dir}")")"
+  PYTHONPATH_PREFIX="${base_dir}:${PYTHONPATH_PREFIX}"
+}
+
+main() {
+  local -r appengine_dir="$(readlink -e "$(dirname "${BASH_SOURCE}")")"
+  source "${appengine_dir}/common.sh"
+
+  # Argument parsing.
+  local -r appdir="$1"
+  shift
+
+  if [[ $# -gt 0 && "$1" != "--" ]]; then
+    error "Unexpected argument: $1"
+    usage
+    exit ${E_GENERAL}
+  fi
+  # End argument parsing.
+
+  local -r appname="$(basename "${appdir}")"
+  local -r ae_sdk_dir="google_appengine_${appname}"
+
+  local appname_shell="$(echo "${appname}" | tr '[:lower:]' '[:upper:]')"
+
+  if [[ ! -d "${appdir}" ]]; then
+    error "'${appdir}' is not an appengine app source directory!"
+    usage
+    exit ${E_GENERAL}
+  fi
+
+  info "Found appengine directory ${appengine_dir}"
+  info "Found appengine app ${appname} at ${appdir}"
+
+  pushd "${appengine_dir}" >/dev/null
+
+  if ! prepare_sdk "${appengine_dir}" "${ae_sdk_dir}" "${appname}"; then
+    exit ${E_GENERAL}
+  fi
+
+  setup_django_path "${appengine_dir}" "${ae_sdk_dir}" "${appname}"
+  setup_chromite_path "${appengine_dir}"
+  PYTHONPATH_PREFIX="${appengine_dir}/${ae_sdk_dir}:${PYTHONPATH_PREFIX}"
+  PYTHONPATH="${appengine_dir}/${appname}:${PYTHONPATH}"
+  PATH_PREFIX="${appengine_dir}/${ae_sdk_dir}:${appengine_dir}:${PATH_PREFIX}"
+  PS1_PREFIX="AE:${appname_shell}${PS1_PREFIX}"
+
+  popd >/dev/null
+  enter_ae_shell "$@"
+}
+
+main "$@"
diff --git a/appengine/chromiumos-build-stats/app.yaml b/appengine/chromiumos-build-stats/app.yaml
new file mode 100644
index 0000000..cadac75
--- /dev/null
+++ b/appengine/chromiumos-build-stats/app.yaml
@@ -0,0 +1,25 @@
+application: chromiumos-build-stats
+version: 6
+runtime: python27
+api_version: 1
+threadsafe: false
+
+handlers:
+- url: /stylesheets
+  static_dir: stylesheets
+  secure: always
+
+- url: /upload_command_stats
+  script: main.app
+  secure: always
+
+- url: /.*
+  script: main.app
+  secure: always
+  login: required
+
+libraries:
+- name: webapp2
+  version: latest
+- name: jinja2
+  version: latest
diff --git a/appengine/chromiumos-build-stats/index.html b/appengine/chromiumos-build-stats/index.html
new file mode 100644
index 0000000..17a6087
--- /dev/null
+++ b/appengine/chromiumos-build-stats/index.html
@@ -0,0 +1,87 @@
+<!DOCTYPE html>
+{% autoescape true %}
+<html>
+  <head>
+    <link type="text/css" rel="stylesheet" href="/stylesheets/main.css" />
+  </head>
+  <body>
+    <div align="center">
+      <h2>Build Command Statistics - Prototype</h2>
+      <p>
+        This is an admittedly primitive interface to the build command
+        statistics gathered every time a build command (currently just
+        build_packages) is run in golo.chromium.org or corp.google.com.
+      </p>
+      <p>
+        Create
+        <a href="https://developers.google.com/appengine/docs/python/datastore/gqlreference">GCL queries</a>
+        to search the statistics database.  GCL has no SELECT or FROM clauses.  This site
+        supports a SQL-like SELECT clause for column filtering, but no FROM clause is
+        needed/supported.  If no SELECT clause is given then default columns are selected
+        (see below).  A default LIMIT 30 is used unless otherwise specified.
+      </p>
+    </div>
+
+    <hr>
+    <h3>Prepare a new query</h3>
+    <form action="/stats" method="get">
+      <h5>Write your own query</h5>
+      <div><textarea name="query" rows="2" cols="100">{{ user_query }}</textarea></div>
+      <div>
+        display format:
+        <input type="radio" name="format" value="table" checked>table (default)
+        <input type="radio" name="format" value="json">json
+      </div>
+      <div><input type="submit" value="Submit Query"></div>
+    </form>
+
+    <form action="/stats" method="get">
+      <h5>Select an example query</h5>
+      <div>
+        <select name="query">
+          {% for example_query in example_queries %}
+            <option>{{ example_query }}</option>
+          {% endfor %}
+        </select>
+      </div>
+      <div>
+        display format:
+        <input type="radio" name="format" value="table" checked>table (default)
+        <input type="radio" name="format" value="json">json
+      </div>
+      <div><input type="submit" value="Submit Query"></div>
+    </form>
+
+    <hr>
+    <div><h3>Query results</h3></div>
+    <div>For query: <b>{{ user_query }}</b></div>
+    <p/>
+    {% if error_msg %}
+    <b><font color="red">{{ error_msg }}</font></b>
+    {% else %}
+    <table border="1">
+      {% for row in results_table %}
+      <tr>
+        {% for cell in row %}
+        <td>{{ cell }}</td>
+        {% endfor %}
+      </tr>
+      {% endfor %}
+    </table>
+    {% endif %}
+    <p/>
+
+    <hr>
+    <div><h3>Database columns available</h3></div>
+    <ul>
+    {% for col in column_list %}
+    <li>{{ col }}</li>
+    {% endfor %}
+    </ul>
+    <hr>
+    <div align="center">
+      Signed in as <b>{{ user_email }}</b>
+    </div>
+  </body>
+</html>
+{% endautoescape %}
diff --git a/appengine/chromiumos-build-stats/index.yaml b/appengine/chromiumos-build-stats/index.yaml
new file mode 100644
index 0000000..115e7f1
--- /dev/null
+++ b/appengine/chromiumos-build-stats/index.yaml
@@ -0,0 +1,132 @@
+indexes:
+
+# AUTOGENERATED
+
+# This index.yaml is automatically updated whenever the dev_appserver
+# detects that a new type of query is run.  If you want to manage the
+# index.yaml file manually, remove the above marker line (the line
+# saying "# AUTOGENERATED").  If you want to manage some indexes
+# manually, move them above the marker line.  The index.yaml file is
+# automatically uploaded to the admin console when you next deploy
+# your application using appcfg.py.
+
+- kind: Statistics
+  ancestor: yes
+  properties:
+  - name: board
+
+- kind: Statistics
+  ancestor: yes
+  properties:
+  - name: board
+  - name: end_date
+
+- kind: Statistics
+  ancestor: yes
+  properties:
+  - name: board
+  - name: end_datetime
+
+- kind: Statistics
+  ancestor: yes
+  properties:
+  - name: board
+  - name: end_time
+
+- kind: Statistics
+  ancestor: yes
+  properties:
+  - name: cmd_base
+  - name: end_date
+  - name: run_time
+
+- kind: Statistics
+  ancestor: yes
+  properties:
+  - name: cmd_base
+  - name: end_datetime
+    direction: desc
+
+- kind: Statistics
+  ancestor: yes
+  properties:
+  - name: end_date
+
+- kind: Statistics
+  ancestor: yes
+  properties:
+  - name: end_date
+  - name: end_time
+
+- kind: Statistics
+  ancestor: yes
+  properties:
+  - name: end_date
+  - name: end_time
+    direction: desc
+
+- kind: Statistics
+  ancestor: yes
+  properties:
+  - name: end_date
+  - name: run_time
+
+- kind: Statistics
+  ancestor: yes
+  properties:
+  - name: end_date
+    direction: desc
+
+- kind: Statistics
+  ancestor: yes
+  properties:
+  - name: end_datetime
+
+- kind: Statistics
+  ancestor: yes
+  properties:
+  - name: end_datetime
+    direction: desc
+
+- kind: Statistics
+  ancestor: yes
+  properties:
+  - name: end_time
+
+- kind: Statistics
+  ancestor: yes
+  properties:
+  - name: end_time
+    direction: desc
+
+- kind: Statistics
+  ancestor: yes
+  properties:
+  - name: run_time
+
+- kind: Statistics
+  ancestor: yes
+  properties:
+  - name: run_time
+  - name: end_time
+    direction: desc
+
+- kind: Statistics
+  ancestor: yes
+  properties:
+  - name: run_time
+    direction: desc
+
+- kind: Statistics
+  ancestor: yes
+  properties:
+  - name: username
+  - name: end_date
+  - name: run_time
+
+- kind: Statistics
+  ancestor: yes
+  properties:
+  - name: username
+  - name: end_date
+    direction: desc
diff --git a/appengine/chromiumos-build-stats/main.py b/appengine/chromiumos-build-stats/main.py
new file mode 100644
index 0000000..6d27637
--- /dev/null
+++ b/appengine/chromiumos-build-stats/main.py
@@ -0,0 +1,15 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import webapp2
+
+import stats
+
+# Application configuration.
+URLS = [
+  ('/', stats.MainPage),
+  ('/stats', stats.MainPage),
+  ('/upload_command_stats', stats.PostPage),
+]
+app = webapp2.WSGIApplication(URLS, debug=True)
diff --git a/appengine/chromiumos-build-stats/model.py b/appengine/chromiumos-build-stats/model.py
new file mode 100644
index 0000000..bd1fca7
--- /dev/null
+++ b/appengine/chromiumos-build-stats/model.py
@@ -0,0 +1,27 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""All database model classes for this AppEngine instance."""
+
+from google.appengine.ext import db
+
+class Statistics(db.Model):
+  """Each entry holds stats for one build command run."""
+
+  # Properties common to all commands.
+  end_datetime = db.DateTimeProperty(auto_now_add=True)
+  end_date = db.DateProperty()
+  end_time = db.TimeProperty()
+  cmd_line = db.StringProperty()
+  cmd_base = db.StringProperty()
+  cmd_args = db.StringProperty()
+  run_time = db.IntegerProperty()
+  username = db.StringProperty()
+  board = db.StringProperty()
+  host = db.StringProperty()
+  cpu_count = db.StringProperty()
+  cpu_type = db.StringProperty()
+
+  # Properties for build_packages only.
+  package_count = db.IntegerProperty()
diff --git a/appengine/chromiumos-build-stats/stats.py b/appengine/chromiumos-build-stats/stats.py
new file mode 100644
index 0000000..68b6589
--- /dev/null
+++ b/appengine/chromiumos-build-stats/stats.py
@@ -0,0 +1,258 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import datetime
+import json
+import os
+import re
+
+from chromite.lib import cros_logging as logging
+
+from google.appengine.api import datastore_errors
+from google.appengine.ext import db
+from google.appengine.api import users
+
+import webapp2
+import jinja2
+
+import model
+
+# Could replace this with a function if there is ever any reason
+# to spread entries over multiple datastores.  Consistency is only
+# guaranteed within a datastore, but access should be limited to
+# about 1 per second.  That should not be a problem for us.
+DATASTORE_KEY = db.Key.from_path('Stats', 'default')
+
+JINJA_ENVIRONMENT = jinja2.Environment(
+    loader=jinja2.FileSystemLoader(os.path.dirname(__file__)),
+    extensions=['jinja2.ext.autoescape'],
+    autoescape=True)
+
+
+class MainPage(webapp2.RequestHandler):
+  """Provide interface for interacting with DB."""
+
+  # Regex to peel SQL-like SELECT off front, if present, grabbing SELECT args.
+  # Example: "SELECT foo,bar WHERE blah blah"
+  #          ==> group(1)="foo,bar", group(2)="WHERE blah blah"
+  # Example: "SELECT foo , bar"
+  #          ==> group(1)="foo , bar", group(2)=""
+  # Example: "WHERE blah blah"
+  #          ==> No match
+  QUERY_SELECT_PREFIX_RE = re.compile(r'^\s*SELECT\s+'
+                                      r'([^\s,]+(?:\s*,\s*[^\s,]+)*)' # Group 1
+                                      r'(?:$|\s+)(.*)',               # Group 2
+                                      re.IGNORECASE | re.VERBOSE)
+
+  # Regex to determine if WHERE is present, and capture everything after it.
+  # Example: "WHERE foo=bar ORDER BY whatever"
+  #          ==> group(1)="foo=bar ORDER BY whatever"
+  # Example: "ORDER BY whatever"
+  #          ==> No match
+  QUERY_WHERE_PREFIX_RE = re.compile(r'^WHERE\s+(.+)$',
+                                     re.IGNORECASE | re.VERBOSE)
+
+  # Regex to discover ORDER BY columns in order to highlight them in results.
+  QUERY_ORDER_RE = re.compile(r'ORDER\s+BY\s+(\S+)', re.IGNORECASE)
+
+  # Regex to discover LIMIT value in query.
+  QUERY_LIMIT_RE = re.compile(r'LIMIT\s+(\d+)', re.IGNORECASE)
+
+  # Regex for separating tokens by commas, allowing spaces on either side.
+  COMMA_RE = re.compile(r'\s*,\s*')
+
+  # Default columns to show in results table if no SELECT given.
+  DEFAULT_COLUMNS = ['end_date', 'cmd_line', 'run_time', 'board',
+                     'package_count']
+
+  # All possible columns in Statistics model.
+  ALL_COLUMNS = sorted(model.Statistics.properties())
+
+  # Provide example queries in interface as a form of documentation.
+  EXAMPLE_QUERIES = [
+    ("ORDER BY end_date,run_time"
+     " LIMIT 30"),
+    ("WHERE username='mtennant'"
+     " ORDER BY end_date DESC"
+     " LIMIT 30"),
+    ("SELECT end_datetime,cmd_base,cmd_args,run_time,package_count"
+     " WHERE board='amd64-generic'"
+     " ORDER BY end_datetime"
+     " LIMIT 30"),
+    ("SELECT end_date,cmd_base,run_time,board,package_count"
+     " WHERE end_date=DATE('2012-03-28')"
+     " ORDER BY run_time"
+     " LIMIT 30"),
+    ("SELECT end_date,cmd_base,cmd_args,run_time,username"
+     " WHERE run_time>20"
+     " LIMIT 30"),
+    ]
+
+  def get(self):
+    """Support GET to stats page."""
+    # Note that google.com authorization is required to access this page, which
+    # is controlled in app.yaml and on appspot admin page.
+    orig_query = self.request.get('query')
+    logging.debug('Received raw query %r', orig_query)
+
+    # If no LIMIT was provided, default to a LIMIT of 30 for sanity.
+    if not self.QUERY_LIMIT_RE.search(orig_query):
+      orig_query += ' LIMIT 30'
+
+    query = orig_query
+
+    # Peel off "SELECT" clause from front of query.  GCL does not support SELECT
+    # filtering, but we will support it right here to select/filter columns.
+    query, columns = self._RemoveSelectFromQuery(query)
+    if query == orig_query and columns == self.DEFAULT_COLUMNS:
+      # This means there was no SELECT in query.  That is equivalent to
+      # SELECT of default columns, so show that to user.
+      orig_query = 'SELECT %s %s' % (','.join(columns), orig_query)
+
+    # All queries should have the "ancestor" WHERE clause in them, but that
+    # need not be exposed to interface.  Insert the clause intelligently.
+    query = self._AdjustWhereInQuery(query)
+
+    stat_entries = []
+    error_msg = None
+    try:
+      stat_entries = model.Statistics.gql(query, DATASTORE_KEY)
+    except datastore_errors.BadQueryError as ex:
+      error_msg = '<p>%s.</p><p>Actual GCL query used: "%s"</p>' % (ex, query)
+
+    if self.request.get('format') == 'json':
+      # Write output in the JSON format.
+      d = self._ResultsToDictionary(stat_entries, columns)
+
+      class CustomEncoder(json.JSONEncoder):
+        """Handles non-serializable classes by converting them to strings."""
+        def default(self, obj):
+          if (isinstance(obj, datetime.datetime) or
+              isinstance(obj, datetime.date) or
+              isinstance(obj, datetime.time)):
+            return obj.isoformat()
+
+          return json.JSONEncoder.default(self, obj)
+
+      self.response.content_type = 'application/json'
+      self.response.write(json.dumps(d, cls=CustomEncoder))
+    else:
+      # Write output to the HTML page.
+      results_table = self._PrepareResultsTable(stat_entries, columns)
+      template_values = {
+          'error_msg': error_msg,
+          'gcl_query': query,
+          'user_query': orig_query,
+          'user_email': users.get_current_user(),
+          'results_table': results_table,
+          'column_list': self.ALL_COLUMNS,
+          'example_queries': self.EXAMPLE_QUERIES,
+      }
+      template = JINJA_ENVIRONMENT.get_template('index.html')
+      self.response.write(template.render(template_values))
+
+  def _RemoveSelectFromQuery(self, query):
+    """Remove SELECT clause from |query|, return tuple (new_query, columns)."""
+    match = self.QUERY_SELECT_PREFIX_RE.search(query)
+    if match:
+      # A SELECT clause is present.  Remove it but save requested columns.
+      columns = self.COMMA_RE.split(match.group(1))
+      query = match.group(2)
+
+      if columns == ['*']:
+        columns = self.ALL_COLUMNS
+
+      logging.debug('Columns selected for viewing: %s', ', '.join(columns))
+      return query, columns
+    else:
+      logging.debug('Using default columns for viewing: %s',
+                    ', '.join(self.DEFAULT_COLUMNS))
+      return query, self.DEFAULT_COLUMNS
+
+  def _AdjustWhereInQuery(self, query):
+    """Insert WHERE ANCESTOR into |query| and return."""
+    match = self.QUERY_WHERE_PREFIX_RE.search(query)
+    if match:
+      return 'WHERE ANCESTOR IS :1 AND %s' % match.group(1)
+    else:
+      return 'WHERE ANCESTOR IS :1 %s' % query
+
+  def _PrepareResultsTable(self, stat_entries, columns):
+    """Prepare table for |stat_entries| using only |columns|."""
+    # One header blank for row numbers, then each column name.
+    table = [[c for c in [''] + columns]]
+    # Prepare list of table rows, one for each stat entry.
+    for stat_ix, stat_entry in enumerate(stat_entries):
+      row = [stat_ix + 1]
+      row += [getattr(stat_entry, col) for col in columns]
+      table.append(row)
+
+    return table
+
+  def _ResultsToDictionary(self, stat_entries, columns):
+    """Converts |stat_entries| to a dictionary with |columns| as keys.
+
+    Args:
+      stat_entries: A list of GqlQuery objects.
+      columns: A list of keys to use.
+
+    Returns:
+      A dictionary with |columns| as keys.
+    """
+    stats_dict = dict()
+    keys = [c for c in columns]
+    for stat_ix, stat_entry in enumerate(stat_entries):
+      stats_dict[stat_ix] = dict(
+          (col, getattr(stat_entry, col)) for col in columns)
+
+    return stats_dict
+
+
+class PostPage(webapp2.RequestHandler):
+  """Provides interface for uploading command stats to database."""
+
+  NO_VALUE = '__NO_VALUE_AT_ALL__'
+
+  def post(self):
+    """Support POST of command stats."""
+    logging.info('Stats POST received at %r', self.request.uri)
+
+    new_stat = model.Statistics(parent=DATASTORE_KEY)
+
+    # Check each supported DB property to see if it has a value set
+    # in the POST request.
+    for prop in model.Statistics.properties():
+      # Skip properties with auto_now or auto_now_add enabled.
+      model_prop = getattr(model.Statistics, prop)
+      if ((hasattr(model_prop, 'auto_now_add') and model_prop.auto_now_add) or
+          (hasattr(model_prop, 'auto_now') and model_prop.auto_now)):
+        continue
+
+      # Note that using hasattr with self.request does not work at all.
+      # It (almost) always says the attribute is not present, when getattr
+      # does actually return a value.  Also note that self.request.get is
+      # not returning None as the default value if no explicit default value
+      # is provided, contrary to the spec for dict.get.
+      value = self.request.get(prop, self.NO_VALUE)
+
+      if value is not self.NO_VALUE:
+        # String properties must be 500 characters or less (GQL requirement).
+        if isinstance(model_prop, db.StringProperty) and len(value) > 500:
+          logging.debug('  String property %r too long.  Cutting off at 500'
+                        ' characters.', prop)
+          value = value[:500]
+
+        # Integer properties require casting
+        if isinstance(model_prop, db.IntegerProperty):
+          value = int(value)
+
+        logging.debug('  Stats POST property %r ==> %r', prop, value)
+        setattr(new_stat, prop, value)
+
+    # Use automatically set end_datetime prop to set end_date and end_time.
+    new_stat.end_time = new_stat.end_datetime.time()
+    new_stat.end_date = new_stat.end_datetime.date()
+
+    # Save to model.
+    new_stat.put()
diff --git a/appengine/chromiumos-build-stats/stylesheets/main.css b/appengine/chromiumos-build-stats/stylesheets/main.css
new file mode 100644
index 0000000..a8666e2
--- /dev/null
+++ b/appengine/chromiumos-build-stats/stylesheets/main.css
@@ -0,0 +1,4 @@
+body {
+  font-family: Verdana, Helvetica, sans-serif;
+  background-color: #DDDDDD;
+}
diff --git a/appengine/common.sh b/appengine/common.sh
new file mode 100644
index 0000000..4cf5ba0
--- /dev/null
+++ b/appengine/common.sh
@@ -0,0 +1,28 @@
+#!/bin/bash
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+readonly E_GENERAL=1
+
+error() {
+  (
+  # Red log line.
+  tput setaf 1
+  echo "ERROR: $1"
+  tput sgr0
+  ) >&2
+}
+
+warning() {
+  (
+  # Yellow warning line.
+  tput setaf 3
+  echo "WARNING: $1"
+  tput sgr0
+  ) >&2
+}
+
+info() {
+  echo "INFO: $1"
+}
diff --git a/appengine/cq_stats/.gitignore b/appengine/cq_stats/.gitignore
new file mode 100644
index 0000000..a55e0c2
--- /dev/null
+++ b/appengine/cq_stats/.gitignore
@@ -0,0 +1,2 @@
+/annotator_cidb_creds
+/cq_stats/static
diff --git a/appengine/cq_stats/README b/appengine/cq_stats/README
new file mode 100644
index 0000000..5a59a8a
--- /dev/null
+++ b/appengine/cq_stats/README
@@ -0,0 +1,51 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+This project contains the cros-cq-stats-sheet app.
+
+WARNING: A word about using django: In general, don't.
+We're still discussing whether django is a good choice for future apps. It's not
+the standard template engine used by other infra apps. Before you start writing
+a new app for infra, email chromeos-infra-discuss@.
+
+Local development
+=================
+- We require cidb credentials to be made available to the app in a very specific
+  way. Create a directory / symlink to your *annotator* user credentials for the
+  *debug-cidb* instance named "annotator_cidb_creds". See go/cros-cidb-admin to
+  obtain these credentials.
+- Then from chromite/appengine, you can launch the local dev_appserver using:
+    $ ./cq_stats/dev_appserver
+- There are two kinds of changes that will not be picked up by the dev_appserver
+  automatically:
+  - changes to chromite/ outside of the cq_stats app. (Yes, chromite/ is
+    available to the app, just like to any other code under chromite)
+  - changes to static files.
+
+Deploying the app.
+=============
+Simply use `deploy_app` provided in this directory.
+- You should first deploy the 'dbg' instance of the app, verify that
+  everything is as you expect by navigating to go/chromiumos-build-annotator-dbg
+- Only then should you deploy to 'prod'.
+
+The script requires you to obtain the secret key used to encrypt the cookies for
+pages served from:
+- debug: You can find the key in valentine under
+    Description/hostname: google.com:chromiumos-build-annotator-dbg
+    Purpose/username: build_annotations app secret_key
+- prod: You can find the key in valentine under
+    Description/hostname: google.com:chromiumos-build-annotator
+    Purpose/username: build_annotations app secret_key
+
+Deploy-Troubleshooting
+===============
+If deployment succeeds but the app fails for some reason, you'll get a very
+unhelpful page without any stack trace. This is by design. You should redeploy
+with DEBUG turned on. To do this, set DEBUG to True in the "DEPLOY OVERRIDES"
+section in cq_stats/settings.py
+Other settings autogenerated for deploy can also be overridden there.
+
+pylint-pro-tip: Enter the ae_shell (chromite/appengine/ae_shell cq_stats) before
+running pylint, so it can resolve all imports.
diff --git a/appengine/cq_stats/app.yaml b/appengine/cq_stats/app.yaml
new file mode 100644
index 0000000..417060d
--- /dev/null
+++ b/appengine/cq_stats/app.yaml
@@ -0,0 +1,21 @@
+application: google.com:chromiumos-build-annotator-dbg
+version: 1
+runtime: python27
+api_version: 1
+threadsafe: true
+
+libraries:
+- name: django
+  version: 1.5
+- name: MySQLdb
+  version: latest
+
+builtins:
+- django_wsgi: on
+
+env_variables:
+  DJANGO_SETTINGS_MODULE: 'cq_stats.settings'
+
+handlers:
+- url: /static
+  static_dir: cq_stats/static
diff --git a/appengine/cq_stats/appengine_sdk_mod b/appengine/cq_stats/appengine_sdk_mod
new file mode 100755
index 0000000..4f7f469
--- /dev/null
+++ b/appengine/cq_stats/appengine_sdk_mod
@@ -0,0 +1,27 @@
+#!/bin/bash
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+TOPLVL="$(readlink -e "$(dirname "$0")")"
+APPENGINE_SDK_DIR="$1"
+PATCH_DIR="${TOPLVL}/appengine_sdk_patches"
+SDK_PATCHES=(
+  "${PATCH_DIR}/fix-dbshell-to-use-user-password-from-settings-file.patch"
+  "${PATCH_DIR}/fix-mysql-backend-to-pass-the-ssl-options-through.patch"
+)
+
+source ${TOPLVL}/../common.sh
+
+if [[ ! -d "${APPENGINE_SDK_DIR}" ]]; then
+  error "No appengine SDK at ${APPENGINE_SDK_DIR}"
+  exit ${E_GENERAL}
+fi
+
+for sdk_patch in "${SDK_PATCHES[@]}"; do
+  echo "Applying ${sdk_patch} to ${APPENGINE_SDK_DIR}"
+  if ! patch -d "${APPENGINE_SDK_DIR}" -p 1 < "${sdk_patch}"; then
+    error "Failed to apply patch ${sdk_patch}. Bailing."
+    exit ${E_GENERAL}
+  fi
+done
diff --git a/appengine/cq_stats/appengine_sdk_patches/fix-dbshell-to-use-user-password-from-settings-file.patch b/appengine/cq_stats/appengine_sdk_patches/fix-dbshell-to-use-user-password-from-settings-file.patch
new file mode 100644
index 0000000..3ba4a9c
--- /dev/null
+++ b/appengine/cq_stats/appengine_sdk_patches/fix-dbshell-to-use-user-password-from-settings-file.patch
@@ -0,0 +1,76 @@
+From 7776458f4c723b1d1b4d796cc958fdb46cf5a03f Mon Sep 17 00:00:00 2001
+From: Prathmesh Prabhu <pprabhu@chromium.org>
+Date: Mon, 5 Jan 2015 13:44:26 +0530
+Subject: Fix dbshell to use user/password from settings file.
+
+---
+ google/storage/speckle/python/django/backend/client.py | 16 ++++++++++++++--
+ google/storage/speckle/python/tool/google_sql.py       |  7 ++++++-
+ 2 files changed, 20 insertions(+), 3 deletions(-)
+
+diff --git a/google/storage/speckle/python/django/backend/client.py b/google/storage/speckle/python/django/backend/client.py
+index 3c65897..cab320a 100644
+--- a/google/storage/speckle/python/django/backend/client.py
++++ b/google/storage/speckle/python/django/backend/client.py
+@@ -35,12 +35,24 @@ class DatabaseClient(backends.BaseDatabaseClient):
+   def runshell(self):
+     """Start an interactive database shell."""
+     settings_dict = self.connection.settings_dict
+-    args = [self.executable_name]
+-    args = ['', settings_dict.get('INSTANCE')]
++    #args = [self.executable_name]
++    args = ['']
++
++    user = settings_dict.get('USER')
++    if user:
++      args.append('--user')
++      args.append(user)
++    password = settings_dict.get('PASSWORD')
++    if password:
++      args.append('--password')
++      args.append(password)
++
++    args.append(settings_dict.get('INSTANCE'))
+     database = settings_dict.get('NAME')
+     if database:
+       args.append(database)
+
++    print('[xckd] Args for google_sql: (%s)' % args)
+
+
+
+diff --git a/google/storage/speckle/python/tool/google_sql.py b/google/storage/speckle/python/tool/google_sql.py
+index 3aa1288..91132f4 100644
+--- a/google/storage/speckle/python/tool/google_sql.py
++++ b/google/storage/speckle/python/tool/google_sql.py
+@@ -210,6 +210,10 @@ def main(argv):
+   parser.add_option('-e', '--output_encoding', dest='output_encoding',
+                     default=DEFAULT_ENCODING,
+                     help='Output encoding. Defaults to %s.' % DEFAULT_ENCODING)
++  parser.add_option('--user', dest='user',
++                    default=None, help=optparse.SUPPRESS_HELP)
++  parser.add_option('--password', dest='password',
++                    default=None, help=optparse.SUPPRESS_HELP)
+   parser.add_option('--oauth_credentials_path', dest='oauth_credentials_path',
+                     default=None, help=optparse.SUPPRESS_HELP)
+
+@@ -223,6 +227,7 @@ def main(argv):
+
+
+   instance_alias = instance.replace(':', '#')
++  print(instance_alias)
+   database = None
+   if len(args) == 2:
+     database = args[1]
+@@ -237,7 +242,7 @@ def main(argv):
+   db.add_driver(GoogleSqlDriver.NAME, GoogleSqlDriver)
+   sql_cmd_config = config.SQLCmdConfig(None)
+   sql_cmd_config.add('__googlesql__', instance_alias, None, None, database,
+-                     GoogleSqlDriver.NAME, None, None)
++                     GoogleSqlDriver.NAME, options.user, options.password)
+   sql_cmd = GoogleSqlCmd(sql_cmd_config)
+   sql_cmd.set_output_encoding(options.output_encoding)
+   sql_cmd.set_database(instance_alias)
+--
+2.2.0.rc0.207.ga3a616c
+
diff --git a/appengine/cq_stats/appengine_sdk_patches/fix-mysql-backend-to-pass-the-ssl-options-through.patch b/appengine/cq_stats/appengine_sdk_patches/fix-mysql-backend-to-pass-the-ssl-options-through.patch
new file mode 100644
index 0000000..60d20f2
--- /dev/null
+++ b/appengine/cq_stats/appengine_sdk_patches/fix-mysql-backend-to-pass-the-ssl-options-through.patch
@@ -0,0 +1,43 @@
+From fa8dc227f8b1804bb3ccae38db168f32c34cfc27 Mon Sep 17 00:00:00 2001
+From: Prathmesh Prabhu <pprabhu@chromium.org>
+Date: Tue, 6 Jan 2015 13:31:00 +0530
+Subject: Fix mysql backend to pass the ssl options through.
+
+---
+ lib/django-1.5/django/db/backends/mysql/client.py | 13 +++++++++++++
+ 1 file changed, 13 insertions(+)
+
+diff --git a/lib/django-1.5/django/db/backends/mysql/client.py b/lib/django-1.5/django/db/backends/mysql/client.py
+index 1cf8cee..f395564 100644
+--- a/lib/django-1.5/django/db/backends/mysql/client.py
++++ b/lib/django-1.5/django/db/backends/mysql/client.py
+@@ -15,6 +15,14 @@ class DatabaseClient(BaseDatabaseClient):
+         host = settings_dict['OPTIONS'].get('host', settings_dict['HOST'])
+         port = settings_dict['OPTIONS'].get('port', settings_dict['PORT'])
+         defaults_file = settings_dict['OPTIONS'].get('read_default_file')
++
++        # ssl options
++        ssl = settings_dict['OPTIONS'].get('ssl')
++        if ssl:
++          server_ca = ssl.get('ca', '')
++          client_cert = ssl.get('cert', '')
++          client_key = ssl.get('key', '')
++
+         # Seems to be no good way to set sql_mode with CLI.
+
+         if defaults_file:
+@@ -30,6 +38,11 @@ class DatabaseClient(BaseDatabaseClient):
+                 args += ["--host=%s" % host]
+         if port:
+             args += ["--port=%s" % port]
++        if ssl:
++            args += ["--ssl-ca=%s" % server_ca]
++            args += ["--ssl-cert=%s" % client_cert]
++            args += ["--ssl-key=%s" % client_key]
++
+         if db:
+             args += [db]
+
+--
+2.2.0.rc0.207.ga3a616c
+
diff --git a/appengine/cq_stats/build_annotations/__init__.py b/appengine/cq_stats/build_annotations/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/appengine/cq_stats/build_annotations/__init__.py
diff --git a/appengine/cq_stats/build_annotations/build_row_controller.py b/appengine/cq_stats/build_annotations/build_row_controller.py
new file mode 100644
index 0000000..d628a59
--- /dev/null
+++ b/appengine/cq_stats/build_annotations/build_row_controller.py
@@ -0,0 +1,289 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Controller for the build_annotations app.
+
+This controller sits between the django models for cidb tables and the views
+that power the app.
+Keep non-trivial logic to aggregate data / optimize db access here and test it.
+"""
+
+from __future__ import print_function
+
+import collections
+
+from django.db import models
+from django.db.models import query
+
+from build_annotations import models as ba_models
+
+# We need to fake out some system modules before importing chromite modules.
+from cq_stats import fake_system_modules  # pylint: disable=unused-import
+from chromite.lib import clactions
+
+
+class BuildRow(collections.MutableMapping):
+  """A database "view" that collects all relevant stats about a build."""
+
+  def __init__(self, build_entry, build_stage_entries,
+               cl_action_entries, failure_entries, annotations,
+               costly_annotations_qs):
+    """Initialize a BuildRow.
+
+    Do not use QuerySets as arguments. All query sets must have been evaluated
+    before creating this object. All data manipulation within this object is
+    pure python.
+
+    All non-trivial computation on this object should be lazy: Defer it to
+    property getters.
+    """
+    assert not isinstance(build_entry, query.QuerySet)
+    assert not isinstance(build_stage_entries, query.QuerySet)
+    assert not isinstance(cl_action_entries, query.QuerySet)
+    assert not isinstance(failure_entries, query.QuerySet)
+
+    self._data = {}
+
+    self.build_entry = build_entry
+    self._build_stage_entries = build_stage_entries
+    self._cl_action_entries = cl_action_entries
+    self._failure_entries = failure_entries
+
+    # The readonly data is accessible from this object as dict entries.
+    self['id'] = self.build_entry.id
+    self['build_number'] = self.build_entry.build_number
+    self['status'] = self.build_entry.status
+    self['summary'] = self.build_entry.summary
+    self['start_time'] = self.build_entry.start_time
+    if (self.build_entry.finish_time is not None and
+        self['start_time'] is not None):
+      self['run_time'] = self.build_entry.finish_time - self['start_time']
+    else:
+      self['run_time'] = None
+    if self['start_time'] is not None:
+      self['weekday'] = (self['start_time'].date().weekday() != 6)
+    else:
+      self['weekday'] = None
+    self['chromeos_version'] = self.build_entry.full_version
+    self['chrome_version'] = self.build_entry.chrome_version
+    self['waterfall'] = self.build_entry.waterfall
+    self['builder_name'] = self.build_entry.builder_name
+
+    failed_stages = [x.name for x in build_stage_entries if
+                     x.status == x.FAIL]
+    self['failed_stages'] = ', '.join(failed_stages)
+    self['picked_up_count'] = self._CountCLActions(
+        ba_models.ClActionTable.PICKED_UP)
+    self['submitted_count'] = self._CountCLActions(
+        ba_models.ClActionTable.SUBMITTED)
+    self['kicked_out_count'] = self._CountCLActions(
+        ba_models.ClActionTable.KICKED_OUT)
+    self['annotation_summary'] = self._SummaryAnnotations(annotations)
+    self._costly_annotations_qs = costly_annotations_qs
+
+  def GetAnnotationsQS(self):
+    """Return the queryset backing annotations.
+
+    Executing this queryset is costly because there is no way to optimize the
+    query execution.
+    Since this is a related_set queryset, that was further filtered, each item
+    in the queryset causes a db hit.
+    """
+    return self._costly_annotations_qs
+
+  def __getitem__(self, *args, **kwargs):
+    return self._data.__getitem__(*args, **kwargs)
+
+  def __iter__(self, *args, **kwargs):
+    return self._data.__iter__(*args, **kwargs)
+
+  def __len__(self, *args, **kwargs):
+    return self._data.__len__(*args, **kwargs)
+
+  def __setitem__(self, *args, **kwargs):
+    return self._data.__setitem__(*args, **kwargs)
+
+  def __delitem__(self, *args, **kwargs):
+    return self._data.__delitem__(*args, **kwargs)
+
+  def _CountCLActions(self, cl_action):
+    actions = [x for x in self._cl_action_entries if x.action == cl_action]
+    return len(actions)
+
+  def _SummaryAnnotations(self, annotations):
+    if not annotations:
+      return ''
+
+    result = '%d annotations: ' % len(annotations)
+    summaries = []
+    for annotation in annotations:
+      summary = annotation.failure_category
+      failure_message = annotation.failure_message
+      blame_url = annotation.blame_url
+      if failure_message:
+        summary += '(%s)' % failure_message[:30]
+      elif blame_url:
+        summary += '(%s)' % blame_url[:30]
+      summaries.append(summary)
+
+    result += '; '.join(summaries)
+    return result
+
+
+class BuildRowController(object):
+  """The 'controller' class that collates stats for builds.
+
+  More details here.
+  Unit-test this class please.
+  """
+
+  DEFAULT_NUM_BUILDS = 100
+
+  def __init__(self):
+    self._latest_build_id = 0
+    self._build_rows_map = {}
+
+
+  def GetStructuredBuilds(self, latest_build_id=None,
+                          num_builds=DEFAULT_NUM_BUILDS, extra_filter_q=None):
+    """The primary method to obtain stats for builds
+
+    Args:
+      latest_build_id: build_id of the latest build to query.
+      num_builds: Number of build to query.
+      extra_filter_q: An optional Q object to filter builds. Use GetQ* methods
+          provided in this class to form the filter.
+
+    Returns:
+      A list of BuildRow entries for the queried builds.
+    """
+    # If we're not given any latest_build_id, we fetch the latest builds
+    if latest_build_id is not None:
+      build_qs = ba_models.BuildTable.objects.filter(id__lte=latest_build_id)
+    else:
+      build_qs = ba_models.BuildTable.objects.all()
+
+    if extra_filter_q is not None:
+      build_qs = build_qs.filter(extra_filter_q)
+    build_qs = build_qs.order_by('-id')
+    build_qs = build_qs[:num_builds]
+
+    # Critical for performance: Prefetch all the join relations we'll need.
+    build_qs = build_qs.prefetch_related('buildstagetable_set')
+    build_qs = build_qs.prefetch_related('clactiontable_set')
+    build_qs = build_qs.prefetch_related(
+        'buildstagetable_set__failuretable_set')
+    build_qs = build_qs.prefetch_related('annotationstable_set')
+
+    # Now hit the database.
+    build_entries = [x for x in build_qs]
+
+    self._build_rows_map = {}
+    build_rows = []
+    for build_entry in build_entries:
+      build_stage_entries = [x for x in build_entry.buildstagetable_set.all()]
+      cl_action_entries = [x for x in build_entry.clactiontable_set.all()]
+      failure_entries = []
+      for entry in build_stage_entries:
+        failure_entries += [x for x in entry.failuretable_set.all()]
+      # Filter in python, filter'ing the queryset changes the queryset, and we
+      # end up hitting the database again.
+      annotations = [a for a in build_entry.annotationstable_set.all() if
+                     a.deleted == False]
+      costly_annotations_qs = build_entry.annotationstable_set.filter(
+          deleted=False)
+
+      build_row = BuildRow(build_entry, build_stage_entries, cl_action_entries,
+                           failure_entries, annotations, costly_annotations_qs)
+
+      self._build_rows_map[build_entry.id] = build_row
+      build_rows.append(build_row)
+
+    if build_entries:
+      self._latest_build_id = build_entries[0].id
+
+    return build_rows
+
+  def GetHandlingTimeHistogram(self, latest_build_id=None,
+                               num_builds=DEFAULT_NUM_BUILDS,
+                               extra_filter_q=None):
+    """Get CL handling time histogram."""
+    # If we're not given any latest_build_id, we fetch the latest builds
+    if latest_build_id is not None:
+      build_qs = ba_models.BuildTable.objects.filter(id__lte=latest_build_id)
+    else:
+      build_qs = ba_models.BuildTable.objects.all()
+
+    if extra_filter_q is not None:
+      build_qs = build_qs.filter(extra_filter_q)
+    build_qs = build_qs.order_by('-id')
+    build_qs = build_qs[:num_builds]
+
+    # Hit the database.
+    build_entries = list(build_qs)
+    claction_qs = ba_models.ClActionTable.objects.select_related('build_id')
+    claction_qs = claction_qs.filter(
+        build_id__in=set(b.id for b in build_entries))
+    # Hit the database.
+    claction_entries = [c for c in claction_qs]
+
+    claction_history = clactions.CLActionHistory(
+        self._JoinBuildTableClActionTable(build_entries, claction_entries))
+    # Convert times seconds -> minutes.
+    return {k: v / 60.0
+            for k, v in claction_history.GetPatchHandlingTimes().iteritems()}
+
+  def _JoinBuildTableClActionTable(self, build_entries, claction_entries):
+    """Perform the join operation in python.
+
+    Args:
+      build_entries: A list of buildTable entries.
+      claction_entries: A list of claction_entries.
+
+    Returns:
+      A list fo claction.CLAction objects created by joining the list of builds
+      and list of claction entries.
+    """
+    claction_entries_by_build_id = {}
+    for entry in claction_entries:
+      entries = claction_entries_by_build_id.setdefault(entry.build_id.id, [])
+      entries.append(entry)
+
+    claction_list = []
+    for build_entry in build_entries:
+      for claction_entry in claction_entries_by_build_id.get(build_entry.id,
+                                                             []):
+        claction_list.append(clactions.CLAction(
+            id=claction_entry.id,
+            build_id=build_entry.id,
+            action=claction_entry.action,
+            reason=claction_entry.reason,
+            build_config=build_entry.build_config,
+            change_number=claction_entry.change_number,
+            patch_number=claction_entry.patch_number,
+            change_source=claction_entry.change_source,
+            timestamp=claction_entry.timestamp))
+
+    return claction_list
+
+  ############################################################################
+  # GetQ* methods are intended to be used in nifty search expressions to search
+  # for builds.
+  @classmethod
+  def GetQNoAnnotations(cls):
+    """Return a Q for builds with no annotations yet."""
+    return models.Q(annotationstable__isnull=True)
+
+  @classmethod
+  def GetQRestrictToBuildConfig(cls, build_config):
+    """Return a Q for builds with the given build_config."""
+    return models.Q(build_config=build_config)
+
+  @property
+  def num_builds(self):
+    return len(self._build_rows_map)
+
+  @property
+  def latest_build_id(self):
+    return self._latest_build_id
diff --git a/appengine/cq_stats/build_annotations/fields.py b/appengine/cq_stats/build_annotations/fields.py
new file mode 100644
index 0000000..e7d840c
--- /dev/null
+++ b/appengine/cq_stats/build_annotations/fields.py
@@ -0,0 +1,116 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Custom django model field definitions.
+
+This module defines some convenience fields and readonly versions of required
+django field types.
+"""
+
+from __future__ import print_function
+
+from django.db import models
+
+
+class BlobField(models.Field):
+  """A binary blob field."""
+  description = 'Blob'
+
+  def db_type(self, connection):
+    return 'blob'
+
+
+class EnumField(models.CharField):
+  """An enumeration field.
+
+  This is a text field that additionally provides attributes to access the
+  choices available for the enum values.
+  """
+
+  def __init__(self, *args, **kwargs):
+    choices = kwargs.get('choices', [])
+    max_length = max(len(x) for x in choices)
+    kwargs['max_length'] = max_length
+    for choice in choices:
+      setattr(self, choice.upper(), choice)
+    super(EnumField, self).__init__(*args, **kwargs)
+
+
+# For all ReadOnly* fields, set null=True
+# This allows us to use test data that has null values. Without this option,
+# tests complain during loaddata if any of the fields (that we don't care about
+# in the test itself) are null. Since this data is readonly, this data storage
+# option is irrelevant in prod.
+
+class ReadOnlyIntegerField(models.IntegerField):
+  """Thou shalt not edit this field, otherwise, we're very accomodating."""
+  def __init__(self, *args, **kwargs):
+    kwargs['editable'] = False
+    kwargs['blank'] = True
+    if not kwargs.get('primary_key', False):
+      kwargs['null'] = True
+    super(ReadOnlyIntegerField, self).__init__(*args, **kwargs)
+
+
+class ReadOnlyBooleanField(models.NullBooleanField):
+  """Thou shalt not edit this field, otherwise, we're very accomodating."""
+  def __init__(self, *args, **kwargs):
+    kwargs['editable'] = False
+    kwargs['blank'] = True
+    super(ReadOnlyBooleanField, self).__init__(*args, **kwargs)
+
+
+class ReadOnlyDateTimeField(models.DateTimeField):
+  """Thou shalt not edit this field, otherwise, we're very accomodating."""
+  def __init__(self, *args, **kwargs):
+    kwargs['editable'] = False
+    kwargs['blank'] = True
+    if not kwargs.get('primary_key', False):
+      kwargs['null'] = True
+    super(ReadOnlyDateTimeField, self).__init__(*args, **kwargs)
+
+
+class ReadOnlyForeignKey(models.ForeignKey):
+  """Thou shalt not edit this field, otherwise, we're very accomodating."""
+  def __init__(self, *args, **kwargs):
+    kwargs['editable'] = False
+    kwargs['blank'] = True
+    if not kwargs.get('primary_key', False):
+      kwargs['null'] = True
+    super(ReadOnlyForeignKey, self).__init__(*args, **kwargs)
+
+
+class ReadOnlyCharField(models.CharField):
+  """Thou shalt not edit this field, otherwise, we're very accomodating."""
+  def __init__(self, *args, **kwargs):
+    kwargs['editable'] = False
+    kwargs['blank'] = True
+    if not kwargs.get('primary_key', False):
+      kwargs['null'] = True
+    kwargs['max_length'] = 1024
+    super(ReadOnlyCharField, self).__init__(*args, **kwargs)
+
+
+class ReadOnlyBlobField(BlobField):
+  """Thou shalt not edit this field, otherwise, we're very accomodating."""
+  def __init__(self, *args, **kwargs):
+    kwargs['editable'] = False
+    kwargs['blank'] = True
+    if not kwargs.get('primary_key', False):
+      kwargs['null'] = True
+    super(ReadOnlyBlobField, self).__init__(*args, **kwargs)
+
+
+class ReadOnlyEnumField(ReadOnlyCharField):
+  """Thou shalt not edit this field, otherwise, we're very accomodating."""
+
+
+class ReadOnlyURLField(models.URLField):
+  """Thou shalt not edit this field, otherwise, we're very accomodating."""
+  def __init__(self, *args, **kwargs):
+    kwargs['editable'] = False
+    kwargs['blank'] = True
+    if not kwargs.get('primary_key', False):
+      kwargs['null'] = True
+    super(ReadOnlyURLField, self).__init__(*args, **kwargs)
diff --git a/appengine/cq_stats/build_annotations/forms.py b/appengine/cq_stats/build_annotations/forms.py
new file mode 100644
index 0000000..852394a
--- /dev/null
+++ b/appengine/cq_stats/build_annotations/forms.py
@@ -0,0 +1,37 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Forms used by the build_annotations app."""
+
+from __future__ import print_function
+
+from django import forms
+
+from build_annotations import models as ba_models
+
+
+class SearchForm(forms.Form):
+  """Form to limit builds shown on the landing page."""
+  latest_build_id = forms.IntegerField()
+  num_builds = forms.IntegerField(label='Number of results')
+
+
+class AnnotationsForm(forms.ModelForm):
+  """Form to add/edit a single annotation to a build."""
+
+  # pylint: disable=no-init, old-style-class
+  class Meta:
+    """Set meta options for the form."""
+    model = ba_models.AnnotationsTable
+    fields = ['failure_category', 'failure_message', 'blame_url', 'notes',
+              'deleted']
+
+
+# NB: Explicitly set can_delete=False for clarity.
+# Due to a bug in (< django-1.7), models get deleted when the formset is saved
+# even if we request not to commit changes.
+AnnotationsFormSet = forms.models.modelformset_factory(
+    ba_models.AnnotationsTable,
+    form=AnnotationsForm,
+    can_delete=False)
diff --git a/appengine/cq_stats/build_annotations/models.py b/appengine/cq_stats/build_annotations/models.py
new file mode 100644
index 0000000..fc860d6
--- /dev/null
+++ b/appengine/cq_stats/build_annotations/models.py
@@ -0,0 +1,224 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Django models for cidb tables."""
+
+from __future__ import print_function
+
+from django.db import models
+
+from build_annotations import fields as ba_fields
+
+
+class BaseModel(models.Model):
+  """Abstract base class to store all app-wide Meta options."""
+
+  class Meta(object):
+    """Set meta options for all models in this module."""
+    # This property not inherited.
+    abstract = True
+
+    # The schema for CIDB is maintained external to this app.
+    managed = False
+    # Allow us to split the models.py file into different modules.
+    app_label = 'cq_stats_sheet'
+    # Each model should explicitly set this option. The default django model to
+    # table name mapping does not work for us.
+    db_table = 'Please define me'
+
+  def __iter__(self):
+    for field_name in self._meta.get_all_field_names():
+      value = None
+      if hasattr(self, field_name):
+        value = getattr(self, field_name)
+      yield field_name, value
+
+  def __unicode__(self):
+    result = []
+    for _, value in self:
+      result.append(unicode(value))
+    return u', '.join(result)
+
+  def __str__(self):
+    return str(unicode(self))
+
+
+class BuildTable(BaseModel):
+  """Model for cidb.buildTable."""
+
+  class Meta(object):
+    """Set extra table options."""
+    db_table = 'buildTable'
+
+  id = ba_fields.ReadOnlyIntegerField(primary_key=True)
+  last_updated = ba_fields.ReadOnlyDateTimeField()
+  master_build_id = ba_fields.ReadOnlyForeignKey('self',
+                                                 db_column='master_build_id')
+  buildbot_generation = ba_fields.ReadOnlyIntegerField()
+  builder_name = ba_fields.ReadOnlyCharField()
+  waterfall = ba_fields.ReadOnlyCharField()
+  build_number = ba_fields.ReadOnlyIntegerField()
+  build_config = ba_fields.ReadOnlyCharField()
+  bot_hostname = ba_fields.ReadOnlyCharField()
+  start_time = ba_fields.ReadOnlyDateTimeField()
+  finish_time = ba_fields.ReadOnlyDateTimeField()
+  status = ba_fields.ReadOnlyCharField()
+  status_pickle = ba_fields.ReadOnlyBlobField()
+  build_type = ba_fields.ReadOnlyCharField()
+  chrome_version = ba_fields.ReadOnlyCharField()
+  milestone_version = ba_fields.ReadOnlyCharField()
+  platform_version = ba_fields.ReadOnlyCharField()
+  full_version = ba_fields.ReadOnlyCharField()
+  sdk_version = ba_fields.ReadOnlyCharField()
+  toolchain_url = ba_fields.ReadOnlyURLField()
+  final = ba_fields.ReadOnlyBooleanField()
+  metadata_url = ba_fields.ReadOnlyURLField()
+  summary = ba_fields.ReadOnlyCharField()
+  deadline = ba_fields.ReadOnlyDateTimeField()
+
+
+class BuildStageTable(BaseModel):
+  """Model for cidb.buildStageTable."""
+
+  class Meta(object):
+    """Set extra table options."""
+    db_table = 'buildStageTable'
+
+  # Not used directly in field definition for readonly tables, but used
+  # elsewhere as constants.
+  FAIL = 'fail'
+  PASS = 'pass'
+  INFLIGHT = 'inflight'
+  MISSING = 'missing'
+  PLANNED = 'planned'
+  SKIPPED = 'skipped'
+  FORGIVEN = 'forgiven'
+  STATUS_CHOICES = (
+      (FAIL, 'Stage failed'),
+      (PASS, 'Stage passed! Hurray!'),
+      (INFLIGHT, 'Stage is inflight'),
+      (MISSING, 'Status missing'),
+      (PLANNED, 'Stage is planned'),
+      (SKIPPED, 'Stage skipped'),
+      (FORGIVEN, 'Stage failed but forgiven'))
+
+  id = ba_fields.ReadOnlyIntegerField(primary_key=True)
+  build_id = ba_fields.ReadOnlyForeignKey('BuildTable',
+                                          db_column='build_id')
+  name = ba_fields.ReadOnlyCharField()
+  board = ba_fields.ReadOnlyCharField()
+  status = ba_fields.ReadOnlyEnumField()
+  last_updated = ba_fields.ReadOnlyDateTimeField()
+  start_time = ba_fields.ReadOnlyDateTimeField()
+  finish_time = ba_fields.ReadOnlyDateTimeField()
+  final = ba_fields.ReadOnlyBooleanField()
+
+
+class ClActionTable(BaseModel):
+  """Model for cidb.clActionTable."""
+
+  class Meta(object):
+    """Set extra table options."""
+    db_table = 'clActionTable'
+
+  # Not used directly in field definition for readonly tables, but used
+  # elsewhere as constants.
+  PICKED_UP = 'picked_up'
+  SUBMITTED = 'submitted'
+  KICKED_OUT = 'kicked_out'
+  SUBMIT_FAILED = 'submit_failed'
+  VERIFIED = 'verified'
+  FORGIVEN = 'forgiven'
+  # This list of choices is not exhaustive yet. It's only enough for CQ stats.
+  ACTION_CHOICES = (
+      (PICKED_UP, 'CL picked up by CQ'),
+      (SUBMITTED, 'CL submitted by CQ'),
+      (KICKED_OUT, 'CL kicked out by CQ'),
+      (SUBMIT_FAILED, 'CQ failed to submit CL'),
+      (VERIFIED, 'CL verified by CQ'),
+      (FORGIVEN, 'CQ run failed, but CL forgiven'))
+
+  id = ba_fields.ReadOnlyIntegerField(primary_key=True)
+  build_id = ba_fields.ReadOnlyForeignKey('BuildTable',
+                                          db_column='build_id')
+  change_number = ba_fields.ReadOnlyIntegerField()
+  patch_number = ba_fields.ReadOnlyIntegerField()
+  change_source = ba_fields.ReadOnlyEnumField()
+  action = ba_fields.ReadOnlyEnumField()
+  reason = ba_fields.ReadOnlyCharField()
+  timestamp = ba_fields.ReadOnlyDateTimeField()
+
+
+class FailureTable(BaseModel):
+  """Model for cidb.failureTable."""
+
+  class Meta(object):
+    """Set extra table options."""
+    db_table = 'failureTable'
+
+  id = ba_fields.ReadOnlyIntegerField(primary_key=True)
+  build_stage_id = ba_fields.ReadOnlyForeignKey('BuildStageTable',
+                                                db_column='build_stage_id')
+  outer_failure_id = ba_fields.ReadOnlyForeignKey('self',
+                                                  db_column='outer_failure_id')
+  exception_type = ba_fields.ReadOnlyCharField()
+  exception_message = ba_fields.ReadOnlyCharField()
+  exception_category = ba_fields.ReadOnlyEnumField()
+  extra_info = ba_fields.ReadOnlyCharField()
+  timestamp = ba_fields.ReadOnlyDateTimeField()
+
+
+class AnnotationsTable(BaseModel):
+  """Model for cidb.annotationsTable."""
+
+  class Meta(object):
+    """Set extra table options."""
+    db_table = 'annotationsTable'
+
+  BAD_CL = 'bad_cl'
+  BUG_IN_TOT = 'bug_in_tot'
+  MERGE_CONFLICT = 'merge_conflict'
+  TREE_CONFLICT = 'tree_conflict'
+  SCHEDULED_ABORT = 'scheduled_abort'
+  CL_NOT_READY = 'cl_not_ready'
+  BAD_CHROME = 'bad_chrome'
+  TEST_FLAKE = 'test_flake'
+  GERRIT_FAILURE = 'gerrit_failure'
+  GS_FAILURE = 'gs_failure'
+  LAB_FAILURE = 'lab_failure'
+  BAD_BINARY_FAILURE = 'bad_binary_failure'
+  BUILD_FLAKE = 'build_flake'
+  INFRA_FAILURE = 'infra_failure'
+  MYSTERY = 'mystery'
+  FAILURE_CATEGORY_CHOICES = (
+      (BAD_CL, 'Bad CL (Please specify CL)'),
+      (BUG_IN_TOT, 'Bug in ToT (Please specify bug)'),
+      (MERGE_CONFLICT, 'Merge conflict'),
+      (TREE_CONFLICT, 'Tree conflict'),
+      (SCHEDULED_ABORT, 'Scheduled Abort'),
+      (CL_NOT_READY, 'CL was marked not ready (Please specify CL)'),
+      (BAD_CHROME, 'Bad chrome (Please specify bug)'),
+      (TEST_FLAKE, 'Test flake'),
+      (GERRIT_FAILURE, 'Gerrit failure'),
+      (GS_FAILURE, 'Google Storage failure'),
+      (LAB_FAILURE, 'Lab failure'),
+      (BAD_BINARY_FAILURE, 'Bad binary packages'),
+      (BUILD_FLAKE, 'Local build flake'),
+      (INFRA_FAILURE, 'Other Infrastructure failure'),
+      (MYSTERY, 'Unknown failure: MyStErY'))
+
+  # Warning: Some field constraints are duplicated here from the database
+  # schema in CIDB.
+  id = models.AutoField(primary_key=True)
+  build_id = models.ForeignKey('BuildTable', db_column='build_id')
+  last_updated = models.DateTimeField(auto_now=True)
+  last_annotator = models.CharField(max_length=80)
+  failure_category = models.CharField(
+      max_length=max(len(x) for x, y in FAILURE_CATEGORY_CHOICES),
+      choices=FAILURE_CATEGORY_CHOICES,
+      default='mystery')
+  failure_message = models.CharField(max_length=1024, blank=True, null=True)
+  blame_url = models.CharField(max_length=80, blank=True, null=True)
+  notes = models.CharField(max_length=1024, blank=True, null=True)
+  deleted = models.BooleanField(default=False, null=False)
diff --git a/appengine/cq_stats/build_annotations/static/build_annotations/base.css b/appengine/cq_stats/build_annotations/static/build_annotations/base.css
new file mode 100644
index 0000000..a95a00e
--- /dev/null
+++ b/appengine/cq_stats/build_annotations/static/build_annotations/base.css
@@ -0,0 +1,100 @@
+/* Copyright 2015 The Chromium OS Authors. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+
+body {
+  margin-left: 20px;
+  margin-right: 20px;
+}
+
+/* Navigation bar */
+ul.navbar {
+  list-style-type: none;
+  margin: 0;
+  padding: 0;
+  overflow: hidden;
+}
+li.navbar {
+  float: left;
+  padding: 5px;
+}
+a.navbar {
+  display: block;
+  width: 150px;
+  background-color: #B2CFB9;
+  font-weight: bold;
+  text-align: center;
+}
+
+td, th {
+  text-align: left;
+}
+ul.errorlist {
+  color: #DF0101;
+  font-weight: bold;
+}
+table {
+  margin-top: 30px;
+  margin-bottom: 30px;
+}
+h1, h2, h3 {
+  color: #0B3B17;
+}
+
+table.build_list, table.annotations_list {
+  border: 1px solid green;
+  width: 100%;
+  display: inline;
+}
+th.build_list, th.annotations_list {
+  background-color: #0B3B17;
+  color: #F2F5A9;
+  text-align: center;
+}
+td.build_list, td.annotations_list {
+  padding-left: 10px;
+  padding-right: 10px;
+  text-align: left;
+  display: inherit;
+}
+tr.build_item_pass {
+  background-color: #B2DF99;
+}
+tr.build_item_inflight {
+  background-color: #DDDF99;
+}
+tr.build_item_fail {
+  background-color: #DF99A9;
+}
+tr.build_item_aborted {
+  background-color: #C499DF;
+}
+
+th.build_details, td.build_details {
+  padding-right: 30px;
+  background-color: #E6E6E6;
+}
+
+.hidden {
+  display: none;
+}
+.shown {
+  display: block;
+}
+
+a.external_link:after {
+  display: inline-block;
+  content: "";
+  width: 15px;
+  height: 15px;
+  background: transparent url("images/popup_link.png") no-repeat;
+}
+
+div.centered_div_container {
+  text-align: center;
+}
+div.centered_div_block {
+  display: inline-block;
+  margin: auto;
+}
diff --git a/appengine/cq_stats/build_annotations/static/build_annotations/base.js b/appengine/cq_stats/build_annotations/static/build_annotations/base.js
new file mode 100644
index 0000000..846b84d
--- /dev/null
+++ b/appengine/cq_stats/build_annotations/static/build_annotations/base.js
@@ -0,0 +1,69 @@
+// Copyright 2015 The Chromium OS Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+window.onload = function() {
+  localize_times()
+}
+
+function editAnnotation(base_id_str) {
+  document.getElementById("annotation_" + base_id_str + "_noedit").className =
+      "hidden";
+  document.getElementById("annotation_" + base_id_str + "_edit").className = "";
+  return false;
+}
+
+function populateLatest() {
+  var latest_build_id = document.getElementById("latest_build_id_cached").value;
+  document.getElementById("id_latest_build_id").value = latest_build_id;
+  return false;
+}
+
+// Copied/forked from
+// https://chromium.googlesource.com/infra/infra.git/+/master/appengine/chromium_status/static/js/main/main.js
+function localize_times() {
+  // Localize all the UTC timestamps coming from the server to whatever
+  // the user has set in their browser.
+  require(["dojo/date/locale"], function(locale) {
+    function format(date, datePattern, timePattern) {
+      // The dojo guys like to add a sep between the date and the time
+      // fields for us (based on locale).  Since we want a standards
+      // format, that sep is pure noise, so kill it with {...}.
+      // https://bugs.dojotoolkit.org/ticket/17544
+      return locale.format(new Date(date), {
+          formatLength: 'short',
+          datePattern: datePattern + '{',
+          timePattern: '}' + timePattern
+        }).replace(/{.*}/, ' ');
+    }
+    function long_date(date) { // RFC2822
+      return format(date, 'EEE, dd MMM yyyy', 'HH:mm:ss z');
+    }
+    function short_date(date) {
+      return format(date, 'EEE, dd MMM', 'HH:mm');
+    }
+    var now = new Date();
+    var curr_year = now.getFullYear();
+    var tzname = locale.format(now, {
+        selector: 'time',
+        timePattern: 'z'
+      });
+    var i, elements;
+    // Convert all the fields that have a timezone already.
+    elements = document.getElementsByName('date.datetz');
+    for (i = 0; i < elements.length; ++i)
+      elements[i].innerText = long_date(elements[i].innerText);
+    // Convert all the fields that lack a timezone (which we know is UTC).
+    // We'll assume the timestamps represent the current year as it'll only
+    // really affect the short day-of-week name, and even then it'll only be
+    // slightly off during the ~1st week of January.
+    elements = document.getElementsByName('date.date');
+    for (i = 0; i < elements.length; ++i)
+      elements[i].innerText = short_date(elements[i].innerText + ' ' + curr_year
+                                         + ' UTC');
+    // Convert all the fields that are just a timezone.
+    elements = document.getElementsByName('date.tz');
+    for (i = 0; i < elements.length; ++i)
+      elements[i].innerText = tzname;
+  });
+}
diff --git a/appengine/cq_stats/build_annotations/static/build_annotations/images/popup_link.png b/appengine/cq_stats/build_annotations/static/build_annotations/images/popup_link.png
new file mode 100644
index 0000000..78e8f9d
--- /dev/null
+++ b/appengine/cq_stats/build_annotations/static/build_annotations/images/popup_link.png
Binary files differ
diff --git a/appengine/cq_stats/build_annotations/templates/build_annotations/base.html b/appengine/cq_stats/build_annotations/templates/build_annotations/base.html
new file mode 100644
index 0000000..52d7575
--- /dev/null
+++ b/appengine/cq_stats/build_annotations/templates/build_annotations/base.html
@@ -0,0 +1,28 @@
+<html lang="en">
+  <head>
+  {% load staticfiles %}
+  <link rel="stylesheet" type="text/css" href="{% static 'build_annotations/base.css' %}"/>
+  <script type="text/javascript"
+          data-dojo-config="async:true"
+          src="//ajax.googleapis.com/ajax/libs/dojo/1.10.3/dojo/dojo.js"></script>
+  <script type="text/javascript" src="{% static 'build_annotations/base.js' %}"></script>
+
+  {% block template-private-imports %}
+  <!-- This is especially needed for the django 'load' command which works like
+       python import - loaded files are private to the template -->
+  {% endblock %}
+  </head>
+
+  <body>
+    {% block welcome-header %}
+    <p style="text-align: right">Welcome {{ username }}! 'notate 'em all!</p>
+    {% endblock %}
+
+    {% block content %}
+    <p style="color: red; layout: block">
+      All the page's real content should have gone here.
+      If you're reading me, someone forgot to do their homework.
+    </p>
+    {% endblock %}  <!-- content -->
+  </body>
+</html>
diff --git a/appengine/cq_stats/build_annotations/templates/build_annotations/edit_annotations.html b/appengine/cq_stats/build_annotations/templates/build_annotations/edit_annotations.html
new file mode 100644
index 0000000..663611d
--- /dev/null
+++ b/appengine/cq_stats/build_annotations/templates/build_annotations/edit_annotations.html
@@ -0,0 +1,111 @@
+{% extends 'build_annotations/base.html' %}
+
+{% block template-private-imports %}
+{% load build_annotations_filters %}
+{% endblock %}
+
+{% block content %}
+<h1>Annotate {{ build_config|title }} Build {{ build_row.id }}</h1>
+<ul class="navbar">
+  <li class="navbar">
+    <a class="navbar" href="{% url 'build_annotations:builds_list' build_config %}">Builds List</a>
+  </li>
+</ul>
+
+<table class="build_details">
+  <tr>
+    <th class="build_details">Build Number</th>
+    <td class="build_details"><a class="external_link" href="https://uberchromegw.corp.google.com/i/{{ build_row.waterfall }}/builders/{{ build_row.builder_name }}/builds/{{ build_row.build_number }}" target="_blank" rel="nofollow">{{ build_row.build_number }}</a></td>
+  </tr>
+  <tr>
+    <th class="build_details">Status</th>
+    <td class="build_details">{{ build_row.status }}</td>
+  </tr>
+  <tr>
+    <th class="build_details">Summary</th>
+    <td class="build_details">{{ build_row.summary|linebreaksbr }}</td>
+  </tr>
+  <tr>
+    <th class="build_details">Start Time</th>
+    <td class="build_details" name="date.datetz">{{ build_row.start_time|date:"D, d M Y H:i" }} UTC</td>
+  </tr>
+  <tr>
+    <th class="build_details">Runtime</th>
+    <td class="build_details">{{ build_row.run_time }}</td>
+  </tr>
+  <tr>
+    <th class="build_details">Weekday</th>
+    <td class="build_details">{{ build_row.weekday }}</td>
+  </tr>
+  <tr>
+    <th class="build_details">ChromeOS Version</th>
+    <td class="build_details">{{ build_row.chromeos_version }}</td>
+  </tr>
+  <tr>
+    <th class="build_details">Chrome Version</th>
+    <td class="build_details">{{ build_row.chrome_version }}</td>
+  </tr>
+  <tr>
+    <th class="build_details">Failed Stages</th>
+    <td class="build_details">{{ build_row.failed_stages }}</td>
+  </tr>
+  <tr>
+    <th class="build_details">CLs Picked Up</th>
+    <td class="build_details">{{ build_row.picked_up_count }}</td>
+  </tr>
+  <tr>
+    <th class="build_details">CLs Submitted</th>
+    <td class="build_details">{{ build_row.submitted_count }}</td>
+  </tr>
+  <tr>
+    <th class="build_details">CLs Kicked Out</th>
+    <td class="build_details">{{ build_row.kicked_out_count }}</td>
+  </tr>
+</table>
+<form action="{% url 'build_annotations:edit_annotations' build_config build_row.id %}"
+      method="post">
+  {% csrf_token %}
+  {{ annotations_formset.management_form }}
+  <table class="annotations_list">
+    <tr>
+    {% for form in annotations_formset %}
+      {% if forloop.first %}
+        <tr>
+          <th class="annotations_list">Failure Category</th>
+          <th class="annotations_list">Failure Message</th>
+          <th class="annotations_list">Blame URL</th>
+          <th class="annotations_list">Notes</th>
+          <th class="annotations_list">Update</th>
+        </tr>
+      {% endif %}
+      {% for hidden in form.hidden_fields %}
+        {{ hidden }}
+      {% endfor %}
+
+      {% if not forloop.last %}
+        <tr id="annotation_{{ forloop.counter }}_noedit">
+          <td>{{ form.failure_category.value|default_if_none:""|striptags|crosurlize|linebreaks }}</td>
+          <td>{{ form.failure_message.value|default_if_none:""|striptags|crosurlize|linebreaks }}</td>
+          <td>{{ form.blame_url.value|default_if_none:""|striptags|crosurlize|linebreaks }}</td>
+          <td>{{ form.notes.value|default_if_none:""|striptags|crosurlize|linebreaks }}</td>
+          <td><a href="javascript:void(0)" onclick="editAnnotation('{{ forloop.counter }}')">edit</a>
+        </tr>
+      {% endif %}
+
+      {% if not forloop.last %}
+        <tr id="annotation_{{ forloop.counter }}_edit" class="hidden">
+      {% else %}
+        <tr id="annotation_{{ forloop.counter }}_edit">
+      {% endif %}
+            <td class="annotations_list">{{ form.failure_category }}<br><div class="errorlist">{{ form.failure_category.errors }}</div></td>
+            <td class="annotations_list">{{ form.failure_message }}<br><div class="errorlist">{{ form.failure_message.errors }}</div></td>
+            <td class="annotations_list">{{ form.blame_url }}<br><div class="errorlist">{{ form.blame_url.errors }}</div></td>
+            <td class="annotations_list">{{ form.notes }}<br><div class="errorlist">{{ form.notes.errors }}</div></td>
+            <td class="annotations_list">Mark for deletion: {{ form.deleted }}<br><div class="errorlist">{{ form.deleted.errors }}</div></td>
+        </tr>
+    {% endfor %}
+  </table>
+  <input type="submit" value="Save Changes"/>
+</form>
+{% endblock %}  <!-- content -->
+
diff --git a/appengine/cq_stats/build_annotations/templates/build_annotations/index.html b/appengine/cq_stats/build_annotations/templates/build_annotations/index.html
new file mode 100644
index 0000000..4591d2b
--- /dev/null
+++ b/appengine/cq_stats/build_annotations/templates/build_annotations/index.html
@@ -0,0 +1,108 @@
+{% extends 'build_annotations/base.html' %}
+
+{% block template-private-imports %}
+  <!-- Call this once on every web page. -->
+  <script type="text/javascript" src="https://www.google.com/jsapi"></script>
+
+  <script type="text/javascript">
+    // Load the Google Charts "core" charts (bar, line, pie...)
+    google.load("visualization", '1', {packages:['corechart']});
+    // When the page loads call the drawChart() function.
+    google.setOnLoadCallback(drawChart);
+
+    function drawChart() {
+      var data = google.visualization.arrayToDataTable([          // The chart data.
+        ['CL', 'Handling Time (minutes)'],
+        {% for key, value in histogram_data.items %}
+        ['{{ key }}', {{ value }}],
+        {% endfor %}
+      ]);
+
+      var options = {      // Customize a few of Google Charts' hundreds of options.
+        title: 'Histogram: Patch handling times',
+        width: 600,
+        height: 400,
+        hAxis: { title: 'Patch handling time', },
+        vAxis: { title: '# Patches', },
+      };
+
+      // Create and draw a Google Column Chart.
+      // To experiment with different types of charts, replace "ColumnChart" with
+      // the desired chart type (e.g., "PieChart", "LineChart").
+      var chart = new google.visualization.Histogram(document.getElementById('chart_div'));
+      google.visualization.events.addListener(chart, 'ready', function() {
+        document.getElementById('chart_img_div').innerHTML = '<a href="' + chart.getImageURI() + '">Save Chart</a>'
+      })
+      chart.draw(data, options);
+    }
+  </script>
+
+{% endblock %}  <!-- template-private-imports -->
+{% block content %}
+<h1> {{ build_config|title }} Builds</h1>
+<ul class="navbar">
+  <li class="navbar">
+    <a class="navbar" href="{% url 'build_annotations:builds_list' 'master-paladin' %}">Master-Paladin</a>
+  </li>
+  <li class="navbar">
+    <a class="navbar" href="{% url 'build_annotations:builds_list' 'master-release' %}">Master-Release</a>
+  </li>
+  <li class="navbar">
+    <a class="navbar" href="{% url 'build_annotations:builds_list' 'master-chromium-pfq' %}">Chromium-PFQ</a>
+  </li>
+</ul>
+<form action="{% url 'build_annotations:builds_list' build_config %}" method="post">
+  {% csrf_token %}
+  <table>
+    <tr>
+      <th><label>{{ search_form.latest_build_id.label }}</label></th>
+      <td>{{ search_form.latest_build_id }}</td>
+      <td>
+        <button type="button" onclick="populateLatest()">Get latest</button>
+        <input type="hidden" id="latest_build_id_cached" value="{{ latest_build_id_cached }}"/>
+      </td>
+      <td class="error_message">{{ search_form.latest_build_id.errors }}</td>
+    </tr>
+    <tr>
+      <th><label>{{ search_form.num_builds.label }}</label></th>
+      <td>{{ search_form.num_builds }}</td>
+      <td class="error_message">{{ search_form.num_builds.errors }}</td>
+    </tr>
+  </table>
+  <input type="submit" value="Update List"/>
+</form>
+
+<div id='chart_div_container' class='centered_div_container'>
+  <div id='chart_div' class='centered_div_block'></div>
+  <div id='chart_img_div' class='centered_div_block'></div>
+</div>
+
+<table class="build_list">
+  <tr>
+    <th class="build_list">Build ID</th>
+    <th class="build_list">Build Number</th>
+    <th class="build_list">Status</th>
+    <th class="build_list">Summary</th>
+    <th class="build_list">Start Time</th>
+    <th class="build_list">Runtime</th>
+    <th class="build_list">CLs Picked Up</th>
+    <th class="build_list">Submitted</th>
+    <th class="build_list">Rejected</th>
+    <th class="build_list">Annotation Summary</th>
+  </tr>
+{% for build_row in builds_list %}
+  <tr class="build_item_{{ build_row.status }}">
+    <td class="build_list"><a href="{% url 'build_annotations:edit_annotations' build_config build_row.id %}">{{ build_row.id }}</a></td>
+    <td class="build_list">{{ build_row.build_number }}</td>
+    <td class="build_list">{{ build_row.status|truncatechars:20 }}</td>
+    <td class="build_list">{{ build_row.summary|truncatechars:50|cut:'\n\r' }}</td>
+    <td class="build_list" name="date.datetz">{{ build_row.start_time|date:"D, d M Y H:i" }} UTC</td>
+    <td class="build_list">{{ build_row.run_time }}</td>
+    <td class="build_list">{{ build_row.picked_up_count }}</td>
+    <td class="build_list">{{ build_row.submitted_count }}</td>
+    <td class="build_list">{{ build_row.kicked_out_count }}</td>
+    <td class="build_list">{{ build_row.annotation_summary|truncatechars:150 }}</td>
+  </tr>
+{% endfor %}
+</table>
+{% endblock %}  <!-- content -->
diff --git a/appengine/cq_stats/build_annotations/templatetags/__init__.py b/appengine/cq_stats/build_annotations/templatetags/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/appengine/cq_stats/build_annotations/templatetags/__init__.py
diff --git a/appengine/cq_stats/build_annotations/templatetags/build_annotations_filters.py b/appengine/cq_stats/build_annotations/templatetags/build_annotations_filters.py
new file mode 100644
index 0000000..771feaf
--- /dev/null
+++ b/appengine/cq_stats/build_annotations/templatetags/build_annotations_filters.py
@@ -0,0 +1,46 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Custom template tags for the build_annotations app."""
+
+from __future__ import print_function
+
+from django import template
+from django.template import defaultfilters
+from django.utils import safestring
+
+register = template.Library()
+
+
+@register.filter(needs_autoescape=True, is_safe=True)
+@defaultfilters.stringfilter
+def crosurlize(value, autoescape=None):
+  """URLize strings.
+
+  This builds on top of the url'ize function from django. In addition, it
+  creates links for cros specific regexs.
+
+  TODO(pprabhu) This should be merged with the (much more thorough) urlize
+  functionality in the chromium_status AE app.
+  """
+  words = value.split(' ')
+  for i in xrange(len(words)):
+    is_url = False
+    word = words[i]
+    if (word.startswith('crbug.com/') or word.startswith('crosreview.com/') or
+        word.startswith('b/')):
+      parts = word.split('/')
+      if len(parts) == 2:
+        try:
+          int(parts[1])
+          is_url = True
+        except ValueError:
+          pass
+
+    if is_url:
+      # In-place urlize.
+      words[i] = '<a href="http://%s" rel="nofollow">%s</a>' % (word, word)
+
+  value = safestring.mark_safe(' '.join(words))
+  return defaultfilters.urlize(value, autoescape)
diff --git a/appengine/cq_stats/build_annotations/urls.py b/appengine/cq_stats/build_annotations/urls.py
new file mode 100644
index 0000000..21a643b
--- /dev/null
+++ b/appengine/cq_stats/build_annotations/urls.py
@@ -0,0 +1,25 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Url disptacher for the build_annotations app."""
+
+from __future__ import print_function
+
+from django import http
+from django.conf import urls
+
+from build_annotations import views
+
+
+urlpatterns = urls.patterns(
+    '',
+    urls.url(r'^$',
+             lambda r: http.HttpResponseRedirect(
+                 'builds_list/master-paladin/')),
+    urls.url(r'^builds_list/(?P<build_config>[\w-]+)/$',
+             views.ListBuildsView.as_view(),
+             name='builds_list'),
+    urls.url(r'edit_annotations/(?P<build_config>[\w-]+)/(?P<build_id>\d+)/$',
+             views.EditAnnotationsView.as_view(),
+             name='edit_annotations'))
diff --git a/appengine/cq_stats/build_annotations/views.py b/appengine/cq_stats/build_annotations/views.py
new file mode 100644
index 0000000..10c0f1f
--- /dev/null
+++ b/appengine/cq_stats/build_annotations/views.py
@@ -0,0 +1,191 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""All django views for the build_annotations app."""
+
+from __future__ import print_function
+
+from django import http
+from django import shortcuts
+from django.core import urlresolvers
+from django.views import generic
+from google.appengine.api import users
+
+from build_annotations import build_row_controller
+from build_annotations import models as ba_models
+from build_annotations import forms as ba_forms
+
+
+_DEFAULT_USERNAME = "SomeoneGotHereWithoutLoggingIn"
+
+class ListBuildsView(generic.list.ListView):
+  """The landing page view of the app. Lists requested builds."""
+
+  template_name = 'build_annotations/index.html'
+
+  def __init__(self, *args, **kwargs):
+    super(ListBuildsView, self).__init__(*args, **kwargs)
+    self._username = _DEFAULT_USERNAME
+    self._build_config = None
+    self._search_form = None
+    self._controller = None
+    self._session = None
+    self._builds_list = None
+    self._hist = None
+
+  def get_queryset(self):
+    self._EnsureSessionInitialized()
+    self._controller = build_row_controller.BuildRowController()
+    build_config_q = None
+    if self._build_config is not None:
+      build_config_q = self._controller.GetQRestrictToBuildConfig(
+          self._build_config)
+    self._builds_list = self._controller.GetStructuredBuilds(
+        latest_build_id=self._session['latest_build_id'],
+        num_builds=self._session['num_builds'],
+        extra_filter_q=build_config_q)
+    self._hist = self._controller.GetHandlingTimeHistogram(
+        latest_build_id=self._session['latest_build_id'],
+        num_builds=self._session['num_builds'],
+        extra_filter_q=build_config_q)
+    return self._builds_list
+
+  def get_context_object_name(self, _):
+    return 'builds_list'
+
+  def get_context_data(self, **kwargs):
+    context = super(ListBuildsView, self).get_context_data(**kwargs)
+    context['username'] = self._username
+    context['search_form'] = self._GetSearchForm()
+    context['latest_build_id_cached'] = self._GetLatestBuildId()
+    context['build_config'] = self._build_config
+    context['histogram_data'] = self._hist
+    return context
+
+  # pylint: disable=arguments-differ
+  def get(self, request, build_config=None):
+    # We're assured that a username exists in prod because our app sits behind
+    # appengine login. Not so when running from dev_appserver.
+    self._username = users.get_current_user()
+    self._session = request.session
+    self._build_config = build_config
+    return super(ListBuildsView, self).get(request)
+
+  def post(self, request, build_config):
+    self._session = request.session
+    form = ba_forms.SearchForm(request.POST)
+    self._search_form = form
+    if form.is_valid():
+      self._session['latest_build_id'] = form.cleaned_data['latest_build_id']
+      self._session['num_builds'] = form.cleaned_data['num_builds']
+    return self.get(request, build_config)
+
+  def put(self, *args, **kwargs):
+    return self.post(*args, **kwargs)
+
+  def _GetSearchForm(self):
+    if self._search_form is not None:
+      return self._search_form
+    return ba_forms.SearchForm(
+        {'latest_build_id': self._session['latest_build_id'],
+         'num_builds': self._session['num_builds']})
+
+  def _EnsureSessionInitialized(self):
+    latest_build_id = self._session.get('latest_build_id', None)
+    num_results = self._session.get('num_builds', None)
+    if latest_build_id is None or num_results is None:
+      # We don't have a valid search history in this session, obtain defaults.
+      controller = build_row_controller.BuildRowController()
+      controller.GetStructuredBuilds(num_builds=1)
+      self._session['latest_build_id'] = controller.latest_build_id
+      self._session['num_builds'] = controller.DEFAULT_NUM_BUILDS
+
+  def _GetLatestBuildId(self):
+    controller = build_row_controller.BuildRowController()
+    controller.GetStructuredBuilds(num_builds=1)
+    return controller.latest_build_id
+
+
+class EditAnnotationsView(generic.base.View):
+  """View that handles annotation editing page."""
+
+  template_name = 'build_annotations/edit_annotations.html'
+
+  def __init__(self, *args, **kwargs):
+    self._username = _DEFAULT_USERNAME
+    self._formset = None
+    self._context = {}
+    self._request = None
+    self._session = None
+    self._build_config = None
+    self._build_id = None
+    super(EditAnnotationsView, self).__init__(*args, **kwargs)
+
+  def get(self, request, build_config, build_id):
+    # We're assured that a username exists in prod because our app sits behind
+    # appengine login. Not so when running from dev_appserver.
+    self._username = users.get_current_user()
+    self._request = request
+    self._build_config = build_config
+    self._build_id = build_id
+    self._session = request.session
+    self._PopulateContext()
+    return shortcuts.render(request, self.template_name, self._context)
+
+  def post(self, request, build_config, build_id):
+    # We're assured that a username exists in prod because our app sits behind
+    # appengine login. Not so when running from dev_appserver.
+    self._username = users.get_current_user()
+    self._request = request
+    self._build_config = build_config
+    self._build_id = build_id
+    self._session = request.session
+    self._formset = ba_forms.AnnotationsFormSet(request.POST)
+    if self._formset.is_valid():
+      self._SaveAnnotations()
+      return http.HttpResponseRedirect(
+          urlresolvers.reverse('build_annotations:edit_annotations',
+                               args=[self._build_config, self._build_id]))
+    else:
+      self._PopulateContext()
+      return shortcuts.render(request, self.template_name, self._context)
+
+  def _PopulateContext(self):
+    build_row = self._GetBuildRow()
+    if build_row is None:
+      raise http.Http404
+
+    self._context = {}
+    self._context['username'] = self._username
+    self._context['build_config'] = self._build_config
+    self._context['build_row'] = build_row
+    self._context['annotations_formset'] = self._GetAnnotationsFormSet()
+
+  def _GetBuildRow(self):
+    controller = build_row_controller.BuildRowController()
+    build_row_list = controller.GetStructuredBuilds(
+        latest_build_id=self._build_id,
+        num_builds=1)
+    if not build_row_list:
+      return None
+    return build_row_list[0]
+
+  def _GetAnnotationsFormSet(self):
+    if self._formset is None:
+      build_row = self._GetBuildRow()
+      if build_row is not None:
+        queryset = build_row.GetAnnotationsQS()
+      else:
+        queryset = ba_models.AnnotationsTable.objects.none()
+      self._formset = ba_forms.AnnotationsFormSet(queryset=queryset)
+    return self._formset
+
+  def _SaveAnnotations(self):
+    models_to_save = self._formset.save(commit=False)
+    build_row = self._GetBuildRow()
+    for model in models_to_save:
+      if not hasattr(model, 'build_id') or model.build_id is None:
+        model.build_id = build_row.build_entry
+      model.last_annotator = self._username
+      model.save()
diff --git a/appengine/cq_stats/cq_stats/__init__.py b/appengine/cq_stats/cq_stats/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/appengine/cq_stats/cq_stats/__init__.py
diff --git a/appengine/cq_stats/cq_stats/fake_system_modules.py b/appengine/cq_stats/cq_stats/fake_system_modules.py
new file mode 100644
index 0000000..a7393cc
--- /dev/null
+++ b/appengine/cq_stats/cq_stats/fake_system_modules.py
@@ -0,0 +1,30 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Fake out system python modules that are not available on AE.
+
+Chromite imports some standard python modules that are not available on the
+restricted sandbox environment on appengine. Fake out these modules such that
+imports don't break, but any attempt to use the modules blows up obviously.
+"""
+
+from __future__ import print_function
+
+import os
+
+
+FAKE_HOMEDIR = '/tmp/an_obviously_non_existent_home_dir'
+def _expanduser(_):
+  """A fake implementation of os.path.expanduser.
+
+  os.path.expanduser needs to import 'pwd' that is not available on appengine.
+  In fact, the concept of HOMEDIR doesn't make sense at all. So, patch it to
+  return a safe fake path. If we try to use it anywhere, it will fail obviously.
+  """
+  return FAKE_HOMEDIR
+
+
+# Importing this module has the side effect of patching all of the following
+# library modules / classes / functions.
+os.path.expanduser = _expanduser
diff --git a/appengine/cq_stats/cq_stats/middleware.py b/appengine/cq_stats/cq_stats/middleware.py
new file mode 100644
index 0000000..92df90e
--- /dev/null
+++ b/appengine/cq_stats/cq_stats/middleware.py
@@ -0,0 +1,50 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Custom middlewares applicable to all apps on this site."""
+
+from __future__ import print_function
+
+from django.db import connection
+
+
+class SqlPrintingMiddleware(object):
+  """Middleware to print SQL stats for each page load."""
+
+  # We hard code the terminal width because appengine SDK does not support the
+  # fcntl python module. Without that, there's no reliable way to obtain the
+  # terminal width.
+  TERMINAL_WIDTH = 80
+  INDENTATION = 2
+  SQL_WIDTH = TERMINAL_WIDTH - INDENTATION
+  INDENTATION_SPACE = ' ' * INDENTATION
+
+  def _DisplayRed(self, value):
+    return '\033[1;31m%s\033[0m' % value
+
+  def _DisplayGreen(self, value):
+    return '\033[1;32m%s\033[0m' % value
+
+  def _PrintWithIndentation(self, value):
+    print ('%s%s' % (self.INDENTATION_SPACE, value))
+
+  def process_response(self, _, response):
+    """Log SQL stats before forwarding response to the user."""
+    if len(connection.queries) > 0:
+      total_time = 0.0
+      for query in connection.queries:
+        total_time = total_time + float(query['time'])
+
+        nice_sql = query['sql']
+        sql = '[%s] %s' % (self._DisplayRed(query['time']), nice_sql)
+
+        while len(sql) > self.SQL_WIDTH:
+          self._PrintWithIndentation(sql[:self.SQL_WIDTH])
+          sql = sql[self.SQL_WIDTH:]
+        self._PrintWithIndentation(sql)
+      self._PrintWithIndentation(self._DisplayGreen(
+          '[TOTAL QUERIES: %s]' % len(connection.queries)))
+      self._PrintWithIndentation(self._DisplayGreen(
+          '[TOTAL TIME: %s seconds]' % total_time))
+    return response
diff --git a/appengine/cq_stats/cq_stats/settings.py b/appengine/cq_stats/cq_stats/settings.py
new file mode 100644
index 0000000..2f54b39
--- /dev/null
+++ b/appengine/cq_stats/cq_stats/settings.py
@@ -0,0 +1,254 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Django settings for cq_stats project
+
+TODO(pprabhu): These settings should be instead stored in the app local DB that
+AE provides.
+It's probably safer that way (no settings in source), and it's easier to manage
+the instance from the AE admin interface than having to update source and
+relaunch.
+"""
+
+from __future__ import print_function
+
+import os
+
+
+def _IsOnAppEngine():
+  return os.getenv('SERVER_SOFTWARE', '').startswith('Google App Engine')
+
+
+def _AssertFileExists(path):
+  assert os.path.isfile(path), '%s must exist on %s' % (
+      path,
+      'deployed app' if _IsOnAppEngine() else 'local dev_appserver')
+
+
+if _IsOnAppEngine():
+  # Import into our namespace. This only contains auto-generated constants.
+  # It only exists on the deployed app.
+  # pylint: disable=no-name-in-module
+  # pylint: disable=import-error
+  # pylint: disable=wildcard-import
+  from cq_stats.deploy_settings import *
+else:
+  # All the settings that would be defined by deploy_settings.
+  DEBUG = True
+  TEMPLATE_DEBUG = True
+  SECRET_KEY = 'PLACEHOLDER_NON_KEY'
+  CIDB_PROJECT_NAME = 'cosmic-strategy-646'
+  CIDB_INSTANCE_NAME = 'debug-cidb'
+
+# ##############################################################################
+# DEPLOY OVERRIDES
+# Some settings are autogenerated by the deploy script.
+# If you want to override any of them, just define them here after the generated
+# module has been imported.
+#
+# The most common case. When something goes wrong only after deploy.
+# DEBUG = True
+# TEMPLATE_DEBUG = True
+# ##############################################################################
+
+BASE_DIR = os.path.dirname(os.path.dirname(__file__))
+PROJECT_NAME = os.path.basename(os.path.dirname(__file__))
+PROJECT_DIR = os.path.join(BASE_DIR, PROJECT_NAME)
+# dev_appserver.py doesn't pass in any environment variables, so you *must*
+# create this symlink yourself. :(
+# These credentials must be for the 'annotator' cidb user for the debug
+# instance of cidb. See go/cros-cidb-admin
+CIDB_CREDS_DIR = os.path.join(BASE_DIR, 'annotator_cidb_creds')
+
+ANNOTATOR_PASSWORD_PATH = os.path.join(CIDB_CREDS_DIR, 'password.txt')
+_AssertFileExists(ANNOTATOR_PASSWORD_PATH)
+with open(ANNOTATOR_PASSWORD_PATH, 'r') as f:
+  annotator_password = f.read().strip()
+
+if not _IsOnAppEngine():
+  CIDB_HOST_PATH = os.path.join(CIDB_CREDS_DIR, 'host.txt')
+  CIDB_SERVER_CA_PATH = os.path.join(CIDB_CREDS_DIR, 'server-ca.pem')
+  CIDB_CLIENT_CERT_PATH = os.path.join(CIDB_CREDS_DIR, 'client-cert.pem')
+  CIDB_CLIENT_KEY_PATH = os.path.join(CIDB_CREDS_DIR, 'client-key.pem')
+  _AssertFileExists(CIDB_HOST_PATH)
+  _AssertFileExists(CIDB_SERVER_CA_PATH)
+  _AssertFileExists(CIDB_CLIENT_CERT_PATH)
+  _AssertFileExists(CIDB_CLIENT_KEY_PATH)
+  with open(CIDB_HOST_PATH, 'r') as f:
+    cidb_host = f.read().strip()
+
+# Setup database map.
+if os.getenv('SERVER_SOFTWARE', '').startswith('Google App Engine'):
+  # Running on production AppEngine. Use CloudSQL via unix socket.
+  default_database = {
+      'ENGINE': 'django.db.backends.mysql',
+      'HOST': '/cloudsql/%s:%s' % (CIDB_PROJECT_NAME,
+                                   CIDB_INSTANCE_NAME),
+      'NAME': 'cidb',
+      'USER': 'annotator',
+      'PASSWORD': annotator_password}
+else:
+  default_database = {
+      'ENGINE': 'django.db.backends.mysql',
+      'HOST': cidb_host,
+      'PORT': '3306',
+      'NAME': 'cidb',
+      'USER': 'annotator',
+      'PASSWORD': annotator_password,
+      'OPTIONS': {
+          'ssl': {'ca': CIDB_SERVER_CA_PATH,
+                  'cert': CIDB_CLIENT_CERT_PATH,
+                  'key': CIDB_CLIENT_KEY_PATH}}}
+
+DATABASES = {'default': default_database}
+
+# Hosts/domain names that are valid for this site; required if DEBUG is False
+# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
+ALLOWED_HOSTS = ['chromiumos-build-annotator.googleplex.com',
+                 'chromiumos-build-annotator-dbg.googleplex.com']
+
+# Local time zone for this installation. Choices can be found here:
+# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
+# although not all choices may be available on all operating systems.
+# In a Windows environment this must be set to your system time zone.
+TIME_ZONE = 'America/Los_Angeles'
+
+# Language code for this installation. All choices can be found here:
+# http://www.i18nguy.com/unicode/language-identifiers.html
+LANGUAGE_CODE = 'en-us'
+
+SITE_ID = 1
+
+# If you set this to False, Django will make some optimizations so as not
+# to load the internationalization machinery.
+USE_I18N = True
+
+# If you set this to False, Django will not format dates, numbers and
+# calendars according to the current locale.
+USE_L10N = True
+
+# If you set this to False, Django will not use timezone-aware datetimes.
+USE_TZ = True
+
+# Absolute filesystem path to the directory that will hold user-uploaded files.
+# Example: '/var/www/example.com/media/'
+MEDIA_ROOT = ''
+
+# URL that handles the media served from MEDIA_ROOT. Make sure to use a
+# trailing slash.
+# Examples: 'http://example.com/media/', 'http://media.example.com/'
+MEDIA_URL = ''
+
+# Absolute path to the directory static files should be collected to.
+# Don't put anything in this directory yourself; store your static files
+# in apps' 'static/' subdirectories and in STATICFILES_DIRS.
+# Example: '/var/www/example.com/static/'
+STATIC_ROOT = os.path.join(PROJECT_DIR, 'static')
+
+# URL prefix for static files.
+# Example: 'http://example.com/static/', 'http://static.example.com/'
+STATIC_URL = '/static/'
+
+# Additional locations of static files
+STATICFILES_DIRS = (
+    # Put strings here, like '/home/html/static' or 'C:/www/django/static'.
+    # Always use forward slashes, even on Windows.
+    # Don't forget to use absolute paths, not relative paths.
+)
+
+# List of finder classes that know how to find static files in
+# various locations.
+STATICFILES_FINDERS = (
+    'django.contrib.staticfiles.finders.FileSystemFinder',
+    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
+    # 'django.contrib.staticfiles.finders.DefaultStorageFinder',
+)
+
+# Make this unique, and don't share it with anybody.
+# TODO(pprabhu): Add secret key to valentine, must be updated before pushing to
+# prod.
+SECRET_KEY = 'NotSoSecretKeyThatSomeOneDreamtUp'
+
+# List of callables that know how to import templates from various sources.
+TEMPLATE_LOADERS = (
+    'django.template.loaders.filesystem.Loader',
+    'django.template.loaders.app_directories.Loader',
+    # 'django.template.loaders.eggs.Loader',
+)
+
+MIDDLEWARE_CLASSES = [
+    'django.middleware.common.CommonMiddleware',
+    'django.contrib.sessions.middleware.SessionMiddleware',
+    'django.middleware.csrf.CsrfViewMiddleware',
+    # 'django.contrib.auth.middleware.AuthenticationMiddleware',
+    'django.contrib.messages.middleware.MessageMiddleware',
+    # Uncomment the next line for simple clickjacking protection:
+    # 'django.middleware.clickjacking.XFrameOptionsMiddleware',
+]
+if DEBUG:
+  MIDDLEWARE_CLASSES.append('cq_stats.middleware.SqlPrintingMiddleware')
+MIDDLEWARE_CLASSES = tuple(MIDDLEWARE_CLASSES)
+
+ROOT_URLCONF = 'cq_stats.urls'
+
+# Python dotted path to the WSGI application used by Django's runserver.
+WSGI_APPLICATION = 'cq_stats.wsgi.application'
+
+TEMPLATE_DIRS = (
+    # Put strings here, like '/home/html/django_templates' or
+    # 'C:/www/django/templates'.
+    # Always use forward slashes, even on Windows.
+    # Don't forget to use absolute paths, not relative paths.
+)
+
+INSTALLED_APPS = (
+    # 'django.contrib.auth',
+    'django.contrib.contenttypes',
+    'django.contrib.sessions',
+    'django.contrib.sites',
+    'django.contrib.messages',
+    'django.contrib.staticfiles',
+    # 'django.contrib.admin',
+    # Uncomment the next line to enable admin documentation:
+    # 'django.contrib.admindocs',
+
+    # Apps in this project
+    'build_annotations'
+)
+
+SESSION_SERIALIZER = 'django.contrib.sessions.serializers.JSONSerializer'
+
+# (pprabhu): Cookie based sessions are temporary. They have various drawbacks,
+# including a load time penalty if the size grows. OTOH, they are the easiest to
+# bringup on AppEngine. Let's use these to get started.
+SESSION_ENGINE = 'django.contrib.sessions.backends.signed_cookies'
+
+# A sample logging configuration. The only tangible logging
+# performed by this configuration is to send an email to
+# the site admins on every HTTP 500 error when DEBUG=False.
+# See http://docs.djangoproject.com/en/dev/topics/logging for
+# more details on how to customize your logging configuration.
+LOGGING = {
+    'version': 1,
+    'disable_existing_loggers': False,
+    'filters': {
+        'require_debug_false': {
+            '()': 'django.utils.log.RequireDebugFalse'
+        }
+    },
+    'handlers': {
+        'mail_admins': {
+            'level': 'ERROR',
+            'filters': ['require_debug_false'],
+            'class': 'django.utils.log.AdminEmailHandler'
+        }
+    },
+    'loggers': {
+        'django.request': {
+            'handlers': ['mail_admins'],
+            'level': 'ERROR',
+            'propagate': True,
+        },
+    }
+}
diff --git a/appengine/cq_stats/cq_stats/urls.py b/appengine/cq_stats/cq_stats/urls.py
new file mode 100644
index 0000000..79bf8fe
--- /dev/null
+++ b/appengine/cq_stats/cq_stats/urls.py
@@ -0,0 +1,21 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""The main url dispatcher for this project."""
+
+from __future__ import print_function
+
+from django import http
+from django.conf import urls
+
+
+# Uncomment the next two lines to enable the admin:
+# from django.contrib import admin
+# admin.autodiscover()
+urlpatterns = urls.patterns(
+    '',
+    urls.url(r'^$', lambda r: http.HttpResponseRedirect('build_annotations/')),
+    urls.url(r'^build_annotations/', urls.include(
+        'build_annotations.urls',
+        namespace='build_annotations')))
diff --git a/appengine/cq_stats/cq_stats/wsgi.py b/appengine/cq_stats/cq_stats/wsgi.py
new file mode 100644
index 0000000..e3e6c8f
--- /dev/null
+++ b/appengine/cq_stats/cq_stats/wsgi.py
@@ -0,0 +1,34 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""WSGI config for cq_stats project.
+
+This module contains the WSGI application used by Django's development server
+and any production WSGI deployments. It should expose a module-level variable
+named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
+this application via the ``WSGI_APPLICATION`` setting.
+
+Usually you will have the standard Django WSGI application here, but it also
+might make sense to replace the whole Django WSGI application with a custom one
+that later delegates to the Django one. For example, you could introduce WSGI
+middleware here, or combine a Django application with an application of another
+framework.
+"""
+
+from __future__ import print_function
+
+import os
+
+
+# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
+# if running multiple sites in the same mod_wsgi process. To fix this, use
+# mod_wsgi daemon mode with each site in its own daemon process, or use
+# os.environ["DJANGO_SETTINGS_MODULE"] = "cq_stats.settings"
+os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'cq_stats.settings')
+
+# This application object is used by any WSGI server configured to use this
+# file. This includes Django's development server, if the WSGI_APPLICATION
+# setting points here.
+from django.core.wsgi import get_wsgi_application
+application = get_wsgi_application()
diff --git a/appengine/cq_stats/deploy_app b/appengine/cq_stats/deploy_app
new file mode 120000
index 0000000..ef3e37b
--- /dev/null
+++ b/appengine/cq_stats/deploy_app
@@ -0,0 +1 @@
+../../scripts/wrapper.py
\ No newline at end of file
diff --git a/appengine/cq_stats/deploy_app.py b/appengine/cq_stats/deploy_app.py
new file mode 100644
index 0000000..2b5698f
--- /dev/null
+++ b/appengine/cq_stats/deploy_app.py
@@ -0,0 +1,145 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper script to deploy the cq_stats app to our appengine instances."""
+
+from __future__ import print_function
+
+import os
+import time
+
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import osutils
+
+
+APP_INSTANCE_DEBUG = 'debug'
+APP_INSTANCE_PROD = 'prod'
+
+APP_INSTANCE_NAME = {
+    APP_INSTANCE_DEBUG: 'google.com:chromiumos-build-annotator-dbg',
+    APP_INSTANCE_PROD: 'google.com:chromiumos-build-annotator',
+}
+APP_INSTANCE_CIDB = {
+    APP_INSTANCE_DEBUG: 'debug-cidb',
+    APP_INSTANCE_PROD: 'cidb',
+}
+
+
+def _GetParser():
+  """Get parser for deploy_app cli.
+
+  Returns:
+    commandline.ArgumentParser object to parse the commandline args.
+  """
+  parser = commandline.ArgumentParser()
+  parser.add_argument('instance', type=str,
+                      choices=(APP_INSTANCE_DEBUG, APP_INSTANCE_PROD),
+                      help='The app instance to deploy to')
+  parser.add_argument('--secret-key', type=str, required=True,
+                      help='The secret key to sign django cookies.')
+  return parser
+
+
+def _GetDeploySettings(options):
+  """The autogenerated part of django settings.
+
+  Returns:
+    python "code" as str to be written to the settings file.
+  """
+  content = [
+      '# DO NOT EDIT! Autogenerated by %s.' % os.path.basename(__file__),
+      'DEBUG = False',
+      'TEMPLATE_DEBUG = False',
+      'SECRET_KEY = "%s"' % options.secret_key,
+      'CIDB_PROJECT_NAME = "cosmic-strategy-646"',
+      'CIDB_INSTANCE_NAME = "%s"' % APP_INSTANCE_CIDB[options.instance],
+  ]
+  return '\n'.join(content)
+
+
+def _DeployApp(basedir):
+  """Deploy the prepared app from basedir.
+
+  Args:
+    basedir: The base directory where the app has already been prepped.
+  """
+  cros_build_lib.RunCommand(
+      ['./ae_shell', 'cq_stats', '--',
+       'python', 'cq_stats/manage.py', 'collectstatic', '--noinput'],
+      cwd=basedir)
+
+  # Remove sensitive files that are needed to run tools locally to prepare the
+  # deploy directory, but that we don't want to push to AE.
+  cidb_cred_path = os.path.join(basedir, 'cq_stats', 'annotator_cidb_creds')
+  osutils.SafeUnlink(os.path.join(cidb_cred_path, 'client-cert.pem'))
+  osutils.SafeUnlink(os.path.join(cidb_cred_path, 'client-key.pem'))
+  osutils.SafeUnlink(os.path.join(cidb_cred_path, 'server-ca.pem'))
+  cros_build_lib.RunCommand(
+      ['./ae_shell', 'cq_stats', '--',
+       'appcfg.py', '--oauth2', 'update', 'cq_stats'],
+      cwd=basedir)
+
+
+def _Hang(tempdir):
+  """How else will you ever work on this script?
+
+  Args:
+    tempdir: The directory prepared for deploying the app.
+  """
+  logging.info('All the real stuff\'s done. Tempdir: %s', tempdir)
+  while True:
+    logging.info('Sleeping... Hit Ctrl-C to exit.')
+    time.sleep(30)
+
+
+def main(argv):
+  parser = _GetParser()
+  options = parser.parse_args(argv)
+  options.Freeze()
+
+  with osutils.TempDir() as tempdir:
+    # This is rsync in 'archive' mode, but symlinks are followed to copy actual
+    # files/directories.
+    rsync_cmd = ['rsync', '-qrLgotD', '--exclude=\'*/*.pyc\'']
+    chromite_dir = os.path.dirname(
+        os.path.dirname(
+            os.path.dirname(
+                os.path.abspath(__file__))))
+
+    cmd = rsync_cmd + [
+        'chromite/appengine/', tempdir,
+        '--exclude=google_appengine_*',
+    ]
+    cros_build_lib.RunCommand(cmd, cwd=os.path.dirname(chromite_dir))
+
+    cmd = rsync_cmd + [
+        'chromite', os.path.join(tempdir, 'cq_stats'),
+        '--exclude=appengine',
+        '--exclude=third_party',
+        '--exclude=ssh_keys',
+        '--exclude=contrib',
+        '--exclude=.git',
+    ]
+    cros_build_lib.RunCommand(cmd, cwd=os.path.dirname(chromite_dir))
+
+    osutils.WriteFile(os.path.join(tempdir, 'cq_stats', 'cq_stats',
+                                   'deploy_settings.py'),
+                      _GetDeploySettings(options))
+
+    # Point app.yaml at the instance we're deploying to.
+    # Use absolute path. Let's not update sourcedir by mistake.
+    app_yaml_path = os.path.join(tempdir, 'cq_stats', 'app.yaml')
+    regex = (r's/^application:[ \t]*[a-zA-Z0-9_-\.:]\+[ \t]*$'
+             '/application: %s/')
+    cmd = [
+        'sed', '-i',
+        '-e', regex % APP_INSTANCE_NAME[options.instance],
+        app_yaml_path,
+    ]
+    cros_build_lib.RunCommand(cmd, cwd=tempdir)
+
+    _DeployApp(tempdir)
+    # _Hang(tempdir)
diff --git a/appengine/cq_stats/dev_appserver b/appengine/cq_stats/dev_appserver
new file mode 100755
index 0000000..112b29a
--- /dev/null
+++ b/appengine/cq_stats/dev_appserver
@@ -0,0 +1,11 @@
+#!/bin/bash
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+this_dir="$(readlink -e "$(dirname "${BASH_SOURCE}")")"
+ae_shell="${this_dir}/../ae_shell"
+"${ae_shell}" cq_stats -- python cq_stats/manage.py collectstatic --noinput
+if [[ $? -eq 0 ]]; then
+  "${ae_shell}" cq_stats -- dev_appserver.py cq_stats
+fi
diff --git a/appengine/cq_stats/manage.py b/appengine/cq_stats/manage.py
new file mode 100755
index 0000000..294feaf
--- /dev/null
+++ b/appengine/cq_stats/manage.py
@@ -0,0 +1,17 @@
+#!/usr/bin/python2
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""(Semi-)Autogenerated django module for app management."""
+
+from __future__ import print_function
+
+import os
+import sys
+
+
+if __name__ == '__main__':
+  os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'cq_stats.settings')
+  from django.core.management import execute_from_command_line
+  execute_from_command_line(sys.argv)
diff --git a/appengine/dev_appserver b/appengine/dev_appserver
new file mode 100755
index 0000000..727c135
--- /dev/null
+++ b/appengine/dev_appserver
@@ -0,0 +1,24 @@
+#!/bin/bash -e
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This is a simple wrapper around ./google_appengine/dev_appserver.py
+
+# https://developers.google.com/appengine/downloads#Google_App_Engine_SDK_for_Python
+SDK_VER="1.8.6"
+
+srcdir="${0%/*}"
+pushd "${srcdir}" >/dev/null
+
+if [ ! -d google_appengine ]; then
+  zip="google_appengine_${SDK_VER}.zip"
+  wget -c http://googleappengine.googlecode.com/files/${zip}
+  echo "Unpacking ${zip}"
+  unzip -q ${zip}
+fi
+
+popd >/dev/null
+
+HOST=$(hostname | awk -F. '{print $1}')
+exec "${srcdir}"/google_appengine/dev_appserver.py --host ${HOST} "$@"
diff --git a/bin/__init__.py b/bin/__init__.py
new file mode 100644
index 0000000..1c53994
--- /dev/null
+++ b/bin/__init__.py
@@ -0,0 +1,6 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# TODO(ferringb): remove this once depot_tools is updated to no longer
+# have any real logic in chromite_wrapper.
diff --git a/bin/account_tool b/bin/account_tool
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/account_tool
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/autotest_quickmerge b/bin/autotest_quickmerge
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/autotest_quickmerge
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cbuildbot b/bin/cbuildbot
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cbuildbot
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cbuildbot_view_config b/bin/cbuildbot_view_config
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cbuildbot_view_config
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/check_gdata_token b/bin/check_gdata_token
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/check_gdata_token
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/chrome_update_extension_cache b/bin/chrome_update_extension_cache
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/chrome_update_extension_cache
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cidb_admin b/bin/cidb_admin
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cidb_admin
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cros b/bin/cros
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cros
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cros_best_revision b/bin/cros_best_revision
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cros_best_revision
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cros_brick_utils b/bin/cros_brick_utils
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cros_brick_utils
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cros_check_patches b/bin/cros_check_patches
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cros_check_patches
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cros_deps_diff b/bin/cros_deps_diff
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cros_deps_diff
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cros_extract_deps b/bin/cros_extract_deps
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cros_extract_deps
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cros_gdb b/bin/cros_gdb
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cros_gdb
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cros_generate_breakpad_symbols b/bin/cros_generate_breakpad_symbols
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cros_generate_breakpad_symbols
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cros_generate_deps_graphs b/bin/cros_generate_deps_graphs
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cros_generate_deps_graphs
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cros_generate_local_binhosts b/bin/cros_generate_local_binhosts
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cros_generate_local_binhosts
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cros_generate_sysroot b/bin/cros_generate_sysroot
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cros_generate_sysroot
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cros_install_debug_syms b/bin/cros_install_debug_syms
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cros_install_debug_syms
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cros_list_modified_packages b/bin/cros_list_modified_packages
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cros_list_modified_packages
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cros_list_overlays b/bin/cros_list_overlays
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cros_list_overlays
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cros_mark_as_stable b/bin/cros_mark_as_stable
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cros_mark_as_stable
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cros_mark_chrome_as_stable b/bin/cros_mark_chrome_as_stable
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cros_mark_chrome_as_stable
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cros_mark_mojo_as_stable b/bin/cros_mark_mojo_as_stable
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cros_mark_mojo_as_stable
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cros_merge_to_branch b/bin/cros_merge_to_branch
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cros_merge_to_branch
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cros_portage_upgrade b/bin/cros_portage_upgrade
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cros_portage_upgrade
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cros_run_unit_tests b/bin/cros_run_unit_tests
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cros_run_unit_tests
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cros_sdk b/bin/cros_sdk
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cros_sdk
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cros_set_lsb_release b/bin/cros_set_lsb_release
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cros_set_lsb_release
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cros_setup_toolchains b/bin/cros_setup_toolchains
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cros_setup_toolchains
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cros_show_waterfall_layout b/bin/cros_show_waterfall_layout
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cros_show_waterfall_layout
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cros_sysroot_utils b/bin/cros_sysroot_utils
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cros_sysroot_utils
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cros_workon b/bin/cros_workon
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cros_workon
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/crosfw b/bin/crosfw
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/crosfw
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/dep_tracker b/bin/dep_tracker
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/dep_tracker
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/deploy_chrome b/bin/deploy_chrome
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/deploy_chrome
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/diff_license_html b/bin/diff_license_html
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/diff_license_html
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/fwgdb b/bin/fwgdb
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/fwgdb
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/gconv_strip b/bin/gconv_strip
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/gconv_strip
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/generate_container_spec b/bin/generate_container_spec
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/generate_container_spec
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/generate_delta_sysroot b/bin/generate_delta_sysroot
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/generate_delta_sysroot
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/gerrit b/bin/gerrit
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/gerrit
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/gs_fetch_binpkg b/bin/gs_fetch_binpkg
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/gs_fetch_binpkg
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/lddtree b/bin/lddtree
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/lddtree
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/loman b/bin/loman
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/loman
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/merge_package_status b/bin/merge_package_status
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/merge_package_status
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/parallel_emerge b/bin/parallel_emerge
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/parallel_emerge
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/preupload_dump_config b/bin/preupload_dump_config
new file mode 100755
index 0000000..fc92c04
--- /dev/null
+++ b/bin/preupload_dump_config
@@ -0,0 +1,16 @@
+#!/bin/bash
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+bin/cbuildbot_view_config --update_config
+if [ "$(git diff cbuildbot/config_dump.json)" ]; then
+  echo "You have uncommitted changes to cbuildbot/config_dump.json"
+  echo "This is likely because you have modified:"
+  echo "  cbuildbot/chromeos_config.py"
+  echo "Please --amend your commit to include config_dump.json."
+  echo
+  echo "In future, you can update the dump file with the command"
+  echo "bin/cbuildbot_view_config --update_config"
+  exit 1
+fi
diff --git a/bin/pushimage b/bin/pushimage
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/pushimage
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/refresh_package_status b/bin/refresh_package_status
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/refresh_package_status
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/summarize_build_stats b/bin/summarize_build_stats
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/summarize_build_stats
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/sync_chrome b/bin/sync_chrome
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/sync_chrome
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/sync_package_status b/bin/sync_package_status
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/sync_package_status
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/test_image b/bin/test_image
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/test_image
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/update_manifest_remotes b/bin/update_manifest_remotes
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/update_manifest_remotes
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/upload_command_stats b/bin/upload_command_stats
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/upload_command_stats
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/upload_package_status b/bin/upload_package_status
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/upload_package_status
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/upload_prebuilts b/bin/upload_prebuilts
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/upload_prebuilts
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/upload_symbols b/bin/upload_symbols
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/upload_symbols
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bootstrap/__init__.py b/bootstrap/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/bootstrap/__init__.py
diff --git a/bootstrap/brillo b/bootstrap/brillo
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bootstrap/brillo
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bootstrap/cbuildbot b/bootstrap/cbuildbot
new file mode 120000
index 0000000..3a4369d
--- /dev/null
+++ b/bootstrap/cbuildbot
@@ -0,0 +1 @@
+support/chromite_wrapper
\ No newline at end of file
diff --git a/bootstrap/cros b/bootstrap/cros
new file mode 120000
index 0000000..3a4369d
--- /dev/null
+++ b/bootstrap/cros
@@ -0,0 +1 @@
+support/chromite_wrapper
\ No newline at end of file
diff --git a/bootstrap/cros_sdk b/bootstrap/cros_sdk
new file mode 120000
index 0000000..3a4369d
--- /dev/null
+++ b/bootstrap/cros_sdk
@@ -0,0 +1 @@
+support/chromite_wrapper
\ No newline at end of file
diff --git a/bootstrap/repo b/bootstrap/repo
new file mode 100755
index 0000000..c6be5c1
--- /dev/null
+++ b/bootstrap/repo
@@ -0,0 +1,866 @@
+#!/usr/bin/python2
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+## repo default configuration
+##
+REPO_URL='https://chromium.googlesource.com/external/repo'
+REPO_REV='stable'
+
+# Copyright (C) 2008 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# increment this whenever we make important changes to this script
+VERSION = (1, 21)
+
+# increment this if the MAINTAINER_KEYS block is modified
+KEYRING_VERSION = (1, 4)
+MAINTAINER_KEYS = """
+
+     Repo Maintainer <repo@android.kernel.org>
+-----BEGIN PGP PUBLIC KEY BLOCK-----
+Version: GnuPG v1.4.2.2 (GNU/Linux)
+
+mQGiBEj3ugERBACrLJh/ZPyVSKeClMuznFIrsQ+hpNnmJGw1a9GXKYKk8qHPhAZf
+WKtrBqAVMNRLhL85oSlekRz98u41H5si5zcuv+IXJDF5MJYcB8f22wAy15lUqPWi
+VCkk1l8qqLiuW0fo+ZkPY5qOgrvc0HW1SmdH649uNwqCbcKb6CxaTxzhOwCgj3AP
+xI1WfzLqdJjsm1Nq98L0cLcD/iNsILCuw44PRds3J75YP0pze7YF/6WFMB6QSFGu
+aUX1FsTTztKNXGms8i5b2l1B8JaLRWq/jOnZzyl1zrUJhkc0JgyZW5oNLGyWGhKD
+Fxp5YpHuIuMImopWEMFIRQNrvlg+YVK8t3FpdI1RY0LYqha8pPzANhEYgSfoVzOb
+fbfbA/4ioOrxy8ifSoga7ITyZMA+XbW8bx33WXutO9N7SPKS/AK2JpasSEVLZcON
+ae5hvAEGVXKxVPDjJBmIc2cOe7kOKSi3OxLzBqrjS2rnjiP4o0ekhZIe4+ocwVOg
+e0PLlH5avCqihGRhpoqDRsmpzSHzJIxtoeb+GgGEX8KkUsVAhbQpUmVwbyBNYWlu
+dGFpbmVyIDxyZXBvQGFuZHJvaWQua2VybmVsLm9yZz6IYAQTEQIAIAUCSPe6AQIb
+AwYLCQgHAwIEFQIIAwQWAgMBAh4BAheAAAoJEBZTDV6SD1xl1GEAn0x/OKQpy7qI
+6G73NJviU0IUMtftAKCFMUhGb/0bZvQ8Rm3QCUpWHyEIu7kEDQRI97ogEBAA2wI6
+5fs9y/rMwD6dkD/vK9v4C9mOn1IL5JCPYMJBVSci+9ED4ChzYvfq7wOcj9qIvaE0
+GwCt2ar7Q56me5J+byhSb32Rqsw/r3Vo5cZMH80N4cjesGuSXOGyEWTe4HYoxnHv
+gF4EKI2LK7xfTUcxMtlyn52sUpkfKsCpUhFvdmbAiJE+jCkQZr1Z8u2KphV79Ou+
+P1N5IXY/XWOlq48Qf4MWCYlJFrB07xjUjLKMPDNDnm58L5byDrP/eHysKexpbakL
+xCmYyfT6DV1SWLblpd2hie0sL3YejdtuBMYMS2rI7Yxb8kGuqkz+9l1qhwJtei94
+5MaretDy/d/JH/pRYkRf7L+ke7dpzrP+aJmcz9P1e6gq4NJsWejaALVASBiioqNf
+QmtqSVzF1wkR5avZkFHuYvj6V/t1RrOZTXxkSk18KFMJRBZrdHFCWbc5qrVxUB6e
+N5pja0NFIUCigLBV1c6I2DwiuboMNh18VtJJh+nwWeez/RueN4ig59gRTtkcc0PR
+35tX2DR8+xCCFVW/NcJ4PSePYzCuuLvp1vEDHnj41R52Fz51hgddT4rBsp0nL+5I
+socSOIIezw8T9vVzMY4ArCKFAVu2IVyBcahTfBS8q5EM63mONU6UVJEozfGljiMw
+xuQ7JwKcw0AUEKTKG7aBgBaTAgT8TOevpvlw91cAAwUP/jRkyVi/0WAb0qlEaq/S
+ouWxX1faR+vU3b+Y2/DGjtXQMzG0qpetaTHC/AxxHpgt/dCkWI6ljYDnxgPLwG0a
+Oasm94BjZc6vZwf1opFZUKsjOAAxRxNZyjUJKe4UZVuMTk6zo27Nt3LMnc0FO47v
+FcOjRyquvgNOS818irVHUf12waDx8gszKxQTTtFxU5/ePB2jZmhP6oXSe4K/LG5T
++WBRPDrHiGPhCzJRzm9BP0lTnGCAj3o9W90STZa65RK7IaYpC8TB35JTBEbrrNCp
+w6lzd74LnNEp5eMlKDnXzUAgAH0yzCQeMl7t33QCdYx2hRs2wtTQSjGfAiNmj/WW
+Vl5Jn+2jCDnRLenKHwVRFsBX2e0BiRWt/i9Y8fjorLCXVj4z+7yW6DawdLkJorEo
+p3v5ILwfC7hVx4jHSnOgZ65L9s8EQdVr1ckN9243yta7rNgwfcqb60ILMFF1BRk/
+0V7wCL+68UwwiQDvyMOQuqkysKLSDCLb7BFcyA7j6KG+5hpsREstFX2wK1yKeraz
+5xGrFy8tfAaeBMIQ17gvFSp/suc9DYO0ICK2BISzq+F+ZiAKsjMYOBNdH/h0zobQ
+HTHs37+/QLMomGEGKZMWi0dShU2J5mNRQu3Hhxl3hHDVbt5CeJBb26aQcQrFz69W
+zE3GNvmJosh6leayjtI9P2A6iEkEGBECAAkFAkj3uiACGwwACgkQFlMNXpIPXGWp
+TACbBS+Up3RpfYVfd63c1cDdlru13pQAn3NQy/SN858MkxN+zym86UBgOad2
+=CMiZ
+-----END PGP PUBLIC KEY BLOCK-----
+
+     Conley Owens <cco3@android.com>
+-----BEGIN PGP PUBLIC KEY BLOCK-----
+Version: GnuPG v1.4.11 (GNU/Linux)
+
+mQENBFHRvc8BCADFg45Xx/y6QDC+T7Y/gGc7vx0ww7qfOwIKlAZ9xG3qKunMxo+S
+hPCnzEl3cq+6I1Ww/ndop/HB3N3toPXRCoN8Vs4/Hc7by+SnaLFnacrm+tV5/OgT
+V37Lzt8lhay1Kl+YfpFwHYYpIEBLFV9knyfRXS/428W2qhdzYfvB15/AasRmwmor
+py4NIzSs8UD/SPr1ihqNCdZM76+MQyN5HMYXW/ALZXUFG0pwluHFA7hrfPG74i8C
+zMiP7qvMWIl/r/jtzHioH1dRKgbod+LZsrDJ8mBaqsZaDmNJMhss9g76XvfMyLra
+9DI9/iFuBpGzeqBv0hwOGQspLRrEoyTeR6n1ABEBAAG0H0NvbmxleSBPd2VucyA8
+Y2NvM0BhbmRyb2lkLmNvbT6JATgEEwECACIFAlHRvc8CGwMGCwkIBwMCBhUIAgkK
+CwQWAgMBAh4BAheAAAoJEGe35EhpKzgsP6AIAJKJmNtn4l7hkYHKHFSo3egb6RjQ
+zEIP3MFTcu8HFX1kF1ZFbrp7xqurLaE53kEkKuAAvjJDAgI8mcZHP1JyplubqjQA
+xvv84gK+OGP3Xk+QK1ZjUQSbjOpjEiSZpRhWcHci3dgOUH4blJfByHw25hlgHowd
+a/2PrNKZVcJ92YienaxxGjcXEUcd0uYEG2+rwllQigFcnMFDhr9B71MfalRHjFKE
+fmdoypqLrri61YBc59P88Rw2/WUpTQjgNubSqa3A2+CKdaRyaRw+2fdF4TdR0h8W
+zbg+lbaPtJHsV+3mJC7fq26MiJDRJa5ZztpMn8su20gbLgi2ShBOaHAYDDi5AQ0E
+UdG9zwEIAMoOBq+QLNozAhxOOl5GL3StTStGRgPRXINfmViTsihrqGCWBBUfXlUE
+OytC0mYcrDUQev/8ToVoyqw+iGSwDkcSXkrEUCKFtHV/GECWtk1keyHgR10YKI1R
+mquSXoubWGqPeG1PAI74XWaRx8UrL8uCXUtmD8Q5J7mDjKR5NpxaXrwlA0bKsf2E
+Gp9tu1kKauuToZhWHMRMqYSOGikQJwWSFYKT1KdNcOXLQF6+bfoJ6sjVYdwfmNQL
+Ixn8QVhoTDedcqClSWB17VDEFDFa7MmqXZz2qtM3X1R/MUMHqPtegQzBGNhRdnI2
+V45+1Nnx/uuCxDbeI4RbHzujnxDiq70AEQEAAYkBHwQYAQIACQUCUdG9zwIbDAAK
+CRBnt+RIaSs4LNVeB/0Y2pZ8I7gAAcEM0Xw8drr4omg2fUoK1J33ozlA/RxeA/lJ
+I3KnyCDTpXuIeBKPGkdL8uMATC9Z8DnBBajRlftNDVZS3Hz4G09G9QpMojvJkFJV
+By+01Flw/X+eeN8NpqSuLV4W+AjEO8at/VvgKr1AFvBRdZ7GkpI1o6DgPe7ZqX+1
+dzQZt3e13W0rVBb/bUgx9iSLoeWP3aq/k+/GRGOR+S6F6BBSl0SQ2EF2+dIywb1x
+JuinEP+AwLAUZ1Bsx9ISC0Agpk2VeHXPL3FGhroEmoMvBzO0kTFGyoeT7PR/BfKv
++H/g3HsL2LOB9uoIm8/5p2TTU5ttYCXMHhQZ81AY
+=AUp4
+-----END PGP PUBLIC KEY BLOCK-----
+
+     Stefan Zager <szager@chromium.org>
+-----BEGIN PGP PUBLIC KEY BLOCK-----
+Version: GnuPG v1.4.11 (GNU/Linux)
+
+mQINBFIJOcgBEADwZIq4GRGoO1RJFKlrtVK501cwT5H+Acbizc9N5RxTkFmqxDjb
+9ApUaPW6S1b8+nrzE9P1Ri5erfzipuStfaZ/Wl3mP1JjKulibddmgnPOEbAJ673k
+Vj85RUO4rt2oZAHnZN3D3gFJzVY8JVlZ47Enj9fTqzcW78FVsPCpIT9P2LpTLWeE
+jX9Cjxeimy6VvyJstIcDLYhlpUN5UWen79L4LFAkHf3luLuU4W3p9NriqUsy5UG2
+8vO6QdhKrCr5wsjDFFeVnpMtjlSeZJAWH+XhFFibMX1xP5R9BTuJfzw3kOVKvcE0
+e9ClxgoulepXPv2xnDkqO3pG2gQVzl8LA+Aol8/IXfa7KP5FBkxK/g1cDuDtXRk4
+YLpLaLYeeKEhhOHLpsKYkK2DXTIcN+56UnTLGolummpZnCM8UUSZxQgbkFgk4YJL
+Elip0hgLZzqEl5h9vjmnQp89AZIHKcgNmzn+szLTOR9x24joaLyQ534x8OSC8lmu
+tJv2tQjDOVGWVwvY4gOTpyxCWMwur6WOiMk/TPWdiVRFWAGrAHwf0/CTBEqNhosh
+sVXfPeMADBA0PorDbJ6kwcOkLUTGf8CT7OG1R9TuKPEmSjK7BYu/pT4DXitaRCiv
+uPVlwbVFpLFr0/jwaKJVMLUjL5MaYwzjJqI2c4RdROZhpMhkn4LvCMmFSQARAQAB
+tCJTdGVmYW4gWmFnZXIgPHN6YWdlckBjaHJvbWl1bS5vcmc+iQI4BBMBAgAiBQJS
+CTnIAhsDBgsJCAcDAgYVCAIJCgsEFgIDAQIeAQIXgAAKCRDcuoHPGCdZNU0UD/9y
+0zwwOJH2UGPAzZ0YVzr7p0HtKedoxuFvPkdQxlBIaUOueMzFRmNQu3GI9irAu3MQ
+Jkip8/gi7dnLVmJyS/zWARBaRGwSVd1++87XDjw8n7l181p7394X0Agq/heri599
+YheHXkxXKVMPqByWNEPHu4eDbxeJTaDIjcKC2pzKQkm6HbWgW4wA9gCh1TRki8FP
+LMv1Fu/dr13STCR9P2evsTRZ+ZSJhTSboHNHeEAJGiGZQAsN94oht7647lYj+AyR
+ThzyHDMXXiDr8jPJIkyRilY+y82bCOatOfPoCkce3VI+LRUGJ19hJY01m4RRneIE
+55l7fXR3zggcsONjV5b+oLcGQPGgX9w64BJ7khT7Wb9+kuyrdJBIBzJsaACFEbri
+pPi02FS/HahYpLC3J66REAeNyofgVXau6WQsHrHMGsBTL9aAr0nrCrkF4Nyyc2Jd
+do6nYuljuUhORqbEECmmBM2eBtkL6Ac92D6WMBIwBOC5tCNHO2YFIvi8Y8EuE8sc
+1zB5U5Ai4SIu2icRAhzAhCRaUq02cMWuELKH6Vuh9nzgEefFWty6vPbKEyZLu19D
+B80aqP1cTN88FjtKQ/eTF29TUB6AefUeBS17e2e3WUMy4nc8tduuOFYfiHP40ScP
+wOoatwfzpiTIPGbocUEPL+pS0O/Xy8SINxFMCud3zA==
+=Vd2S
+-----END PGP PUBLIC KEY BLOCK-----
+
+     David James <davidjames@google.com>
+-----BEGIN PGP PUBLIC KEY BLOCK-----
+Version: GnuPG v1
+
+mQINBFQKWWsBEACjAxD8xLqNVFX/qOAKFW7R63J3KkkXQKyH5KmSWZnmdfTg4AeR
+h9sAUls16nHiOFp/MRLFFhax8dm33zfED+zHpISFUkMq2Q3UyP6Z6eSpJyYriEF1
+hP7PpwksEnh+hoQ36fhsY1vaQRgTCO8XkFVcChb1CoKUl104PornVlZ378RBUUnK
+FAPhRSTEJtK1QXv6JtQXFzEQbX3jgxsKvpw/Zg7V3FnaMRhHw84YvCAbWz9ayTov
+SBOIczOscD9T/F3NbSlgFwWlQ7JeixdOsCMaYh7gYcXqdq2jluHuKQlTGmGlFwGm
+5TOh6NwvVUV68JZfer2CGMQv4JImQfousy9V+KGddTBfjYkwtmG9oTkSWBLuO91/
+q+TFdHkzNxivPcC+iluJkzrJHcS6aUg8vkLZfT2wrGZUBFH7GsZiKht2env1HyVZ
+64md/auhee4ED3V0mtWSWYyjriAQUIE0LHVHP1zyEf5gVwDZyuE2HlFZr1eFJWiH
+jcxQnGi7IpxF2//NCTvO2dc3eTi4f1EexOyomu9AWk/iIDCgCpkU38XlWgVrvmM1
+Mw5pDm691L1Xn3v3yMRZZUCottUpUEnz5qAa0eQHWBU4PpXUCaWElwwuT+3Lcx1U
+Rdq74UPNb+hBGzrID/KmeU0NxGmhzRIwl+LKdCvnM2v4AvRHIjQPBqC5fQARAQAB
+tCNEYXZpZCBKYW1lcyA8ZGF2aWRqYW1lc0Bnb29nbGUuY29tPokCOAQTAQIAIgUC
+VApZawIbAwYLCQgHAwIGFQgCCQoLBBYCAwECHgECF4AACgkQSlDprdejN6zH5A//
+XRAytpjxTIHTtMWp1c7vpi1BMiKF0XRSa8iizbVgZIk6i/jftK8tverRvOzQhUEK
+mwP6WDoX9SbkvxxQd+AxaRmDCQSf7h/fFMB+q9WycH5Mj+N4mc7iivsf1RdZzlmF
+l1wcJoGVsOTFrccca/ZcXjMhWCfpVNDGn29nFtHKddSORhQgy8x0NVf/8NXOF1OL
+Le4cZKBwSokPJEL1Ta4bNQPkzY251CSjH9feHCE1ac16/wh1qhkozl8/QbIVFVTA
+wk1m6q7raj22+2HifrM/w5YkNXYcEL/SfusbCo/rtax75fG0lT9whB6OXuzk0CTu
+zsdBHaYGKCQ+gcalpxqQ/o+xFo0HNI6duCo1zBFAkSX20HZcU5IWr8C2psTuB5zo
+3vPT89GMNlFVhG4JBvuSHcgJFBoTEALugDX1xiRqidjhKPpDMl3Gcezakg2ethQM
+9zwmdlsbh/stcLh9U6eNOqxrjMgmrMRjDocaMu0gFXoGbEMeVVJWrLGgF51k6Q9w
+U3/pvyws6OukV4y3Sr57ACbeQ1am0pCKir2HXB2jmShJfINSyPqhluMz/q1CbYEE
+R7oWoVIL70qhCr4hdJ4yVtqajkUr5jk+IV9L2pny6zt3+3e/132O6yzQ/1NJ1vj9
+hxSNFwdO/JWdqgYtvsFvWQGdKp+RwYBJBp1XIOBA+5W5Ag0EVApZawEQAMC/t6AF
+1eU2wZcLQaahmv+1yaQCV7VfwH8/Lh1AZbMNEITnp97gJ/6SlQqL0fDfjX8DKGE+
+U23o3fKMJr8tIxJqLVzPROomeG+9zhtq5hI3qu53zhR3bCqQpYPQcIHRHxtttYkP
+p+rdTZlYX09TaSsTITNs0/1dCHEgyDS48ujOSmA0fr9eGyxv/2Chr0sDEkSaerJp
+teDKmUdkKoF9SCR7ntfrSFP3eXYFFy+wb+IQjVVHAdTgossXKPtNxzdEKQQHJESJ
+e1jD5BlOpvysOcbDJaRCq7TE2o3Grwy8Um1/Fv+n9naIAN6bZNSrPtiH2G7nX4l6
+126so5sBhJTSGbIV/fb93PZCIfzfJCA4pinYPJH46zn2Ih3AF9mi4eguBK9/oGBe
+03LsNBsfoEI81rRuAl5NeFNa+YXf3w7olF2qbwZXcGmRBteUBBvfonW64nk8w+Ui
+x14gzHJXH6l9jsIavA1AMtFulmh6eEf8hsDUzq8s0Yg9PphVmknxPVW44EttOwCi
+OnlVelRSbABcCNNTv1vOC8ubvt191YRNwAgGMRmXfeEFce76ckVJei/tiENycMXl
+Ff3+km6WmswsDmKxz+DfNtf5SXM24EifO2Q6uX9pbg+AcIWI9Sc2WAfmqCooTU8g
+H2Ua0dskiAi9qq4DPYrwPO+OzAT10nn/TqmDABEBAAGJAh8EGAECAAkFAlQKWWsC
+GwwACgkQSlDprdejN6wHURAAncjYkIkSseO8lldTVu0qJi2vetc2Q6bR8Lw1hTAT
+TB2LcbFheTu6Q/sxDSC5slovFSgyDp8wNkDf88+fxV38LC00IeWz7a9EGPzLzA+D
+fNFdctnxXZGaYB3cQ17TkKFj4AMqbzKPkt4xYWU/WdSWPPd4feFJVjg7l8BIxafF
+58ZYbWN3DwAgKE9DDZ9praTNC/2ytWh21a2j8LR4GlYERW1pMGrMt37IGvZqbU6W
+a7HWaB7f0eXg5M5GTr7KP6TTGwY/500cI4fDme6bih/jXDS4vV53b1HHgvzQFXw/
+XURueobmqsbQQzDGsqPzkYJM4fxXu0TWNhW8CieZMMypPq3uSgvN3jTu2JB9NAEz
+21Pso0NzKm6wxhMzPA6KWILmR2KQn/t51NTE6u0+8e9RmQeg9Ce+IpPzPLsGuNca
+u+r4LcB98D8jIUXz9PPbIHiDLJjMWOG8olZz1zcHpt86b+bf8c9TxFAE8p3G/jpQ
+qanHjtbgNmkz+JpvJ9CTEEo69tkcbmOaCNwCWQL+Doqqi7tWMYUbAw0Rk+lOSu/N
+4cAccd41XU/GmIs9zKkbORWubhfFndc7AXnPUU2otjqMQq0f+QCQrHPdyARf2QCm
+j8zzwdwkRpt3SSvqzh3+L3Zq8xeb2M6u/QLz4aLFTR7yQJed0DJFUcISii9ccJr/
+IM4=
+=6VNc
+-----END PGP PUBLIC KEY BLOCK-----
+"""
+
+GIT = 'git'                     # our git command
+MIN_GIT_VERSION = (1, 7, 2)     # minimum supported git version
+repodir = '.repo'               # name of repo's private directory
+S_repo = 'repo'                 # special repo repository
+S_manifests = 'manifests'       # special manifest repository
+REPO_MAIN = S_repo + '/main.py' # main script
+MIN_PYTHON_VERSION = (2, 6)     # minimum supported python version
+
+
+import errno
+import optparse
+import os
+import re
+import stat
+import subprocess
+import sys
+
+if sys.version_info[0] == 3:
+  import urllib.request
+  import urllib.error
+else:
+  import imp
+  import urllib2
+  urllib = imp.new_module('urllib')
+  urllib.request = urllib2
+  urllib.error = urllib2
+
+
+def _print(*objects, **kwargs):
+  sep = kwargs.get('sep', ' ')
+  end = kwargs.get('end', '\n')
+  out = kwargs.get('file', sys.stdout)
+  out.write(sep.join(objects) + end)
+
+
+# Python version check
+ver = sys.version_info
+if ver[0] == 3:
+  _print('warning: Python 3 support is currently experimental. YMMV.\n'
+         'Please use Python 2.6 - 2.7 instead.',
+         file=sys.stderr)
+if (ver[0], ver[1]) < MIN_PYTHON_VERSION:
+  _print('error: Python version %s unsupported.\n'
+         'Please use Python 2.6 - 2.7 instead.'
+         % sys.version.split(' ')[0], file=sys.stderr)
+  sys.exit(1)
+
+home_dot_repo = os.path.expanduser('~/.repoconfig')
+gpg_dir = os.path.join(home_dot_repo, 'gnupg')
+
+extra_args = []
+init_optparse = optparse.OptionParser(usage="repo init -u url [options]")
+
+# Logging
+group = init_optparse.add_option_group('Logging options')
+group.add_option('-q', '--quiet',
+                 dest="quiet", action="store_true", default=False,
+                 help="be quiet")
+
+# Manifest
+group = init_optparse.add_option_group('Manifest options')
+group.add_option('-u', '--manifest-url',
+                 dest='manifest_url',
+                 help='manifest repository location', metavar='URL')
+group.add_option('-b', '--manifest-branch',
+                 dest='manifest_branch',
+                 help='manifest branch or revision', metavar='REVISION')
+group.add_option('-m', '--manifest-name',
+                 dest='manifest_name',
+                 help='initial manifest file', metavar='NAME.xml')
+group.add_option('--mirror',
+                 dest='mirror', action='store_true',
+                 help='create a replica of the remote repositories '
+                      'rather than a client working directory')
+group.add_option('--reference',
+                 dest='reference',
+                 help='location of mirror directory', metavar='DIR')
+group.add_option('--depth', type='int', default=None,
+                 dest='depth',
+                 help='create a shallow clone with given depth; see git clone')
+group.add_option('--archive',
+                 dest='archive', action='store_true',
+                 help='checkout an archive instead of a git repository for '
+                      'each project. See git archive.')
+group.add_option('-g', '--groups',
+                 dest='groups', default='default',
+                 help='restrict manifest projects to ones with specified '
+                      'group(s) [default|all|G1,G2,G3|G4,-G5,-G6]',
+                 metavar='GROUP')
+group.add_option('-p', '--platform',
+                 dest='platform', default="auto",
+                 help='restrict manifest projects to ones with a specified '
+                      'platform group [auto|all|none|linux|darwin|...]',
+                 metavar='PLATFORM')
+
+
+# Tool
+group = init_optparse.add_option_group('repo Version options')
+group.add_option('--repo-url',
+                 dest='repo_url',
+                 help='repo repository location', metavar='URL')
+group.add_option('--repo-branch',
+                 dest='repo_branch',
+                 help='repo branch or revision', metavar='REVISION')
+group.add_option('--no-repo-verify',
+                 dest='no_repo_verify', action='store_true',
+                 help='do not verify repo source code')
+
+# Other
+group = init_optparse.add_option_group('Other options')
+group.add_option('--config-name',
+                 dest='config_name', action="store_true", default=False,
+                 help='Always prompt for name/e-mail')
+
+class CloneFailure(Exception):
+  """Indicate the remote clone of repo itself failed.
+  """
+
+
+def _Init(args):
+  """Installs repo by cloning it over the network.
+  """
+  opt, args = init_optparse.parse_args(args)
+  if args:
+    init_optparse.print_usage()
+    sys.exit(1)
+
+  url = opt.repo_url
+  if not url:
+    url = REPO_URL
+    extra_args.append('--repo-url=%s' % url)
+
+  branch = opt.repo_branch
+  if not branch:
+    branch = REPO_REV
+    extra_args.append('--repo-branch=%s' % branch)
+
+  if branch.startswith('refs/heads/'):
+    branch = branch[len('refs/heads/'):]
+  if branch.startswith('refs/'):
+    _print("fatal: invalid branch name '%s'" % branch, file=sys.stderr)
+    raise CloneFailure()
+
+  try:
+    os.mkdir(repodir)
+  except OSError as e:
+    if e.errno != errno.EEXIST:
+      _print('fatal: cannot make %s directory: %s'
+             % (repodir, e.strerror), file=sys.stderr)
+      # Don't raise CloneFailure; that would delete the
+      # name. Instead exit immediately.
+      #
+      sys.exit(1)
+
+  _CheckGitVersion()
+  try:
+    if NeedSetupGnuPG():
+      can_verify = SetupGnuPG(opt.quiet)
+    else:
+      can_verify = True
+
+    dst = os.path.abspath(os.path.join(repodir, S_repo))
+    _Clone(url, dst, opt.quiet)
+
+    if can_verify and not opt.no_repo_verify:
+      rev = _Verify(dst, branch, opt.quiet)
+    else:
+      rev = 'refs/remotes/origin/%s^0' % branch
+
+    _Checkout(dst, branch, rev, opt.quiet)
+  except CloneFailure:
+    if opt.quiet:
+      _print('fatal: repo init failed; run without --quiet to see why',
+             file=sys.stderr)
+    raise
+
+
+def ParseGitVersion(ver_str):
+  if not ver_str.startswith('git version '):
+    return None
+
+  num_ver_str = ver_str[len('git version '):].strip().split('-')[0]
+  to_tuple = []
+  for num_str in num_ver_str.split('.')[:3]:
+    if num_str.isdigit():
+      to_tuple.append(int(num_str))
+    else:
+      to_tuple.append(0)
+  return tuple(to_tuple)
+
+
+def _CheckGitVersion():
+  cmd = [GIT, '--version']
+  try:
+    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
+  except OSError as e:
+    _print(file=sys.stderr)
+    _print("fatal: '%s' is not available" % GIT, file=sys.stderr)
+    _print('fatal: %s' % e, file=sys.stderr)
+    _print(file=sys.stderr)
+    _print('Please make sure %s is installed and in your path.' % GIT,
+           file=sys.stderr)
+    raise CloneFailure()
+
+  ver_str = proc.stdout.read().strip()
+  proc.stdout.close()
+  proc.wait()
+
+  ver_act = ParseGitVersion(ver_str)
+  if ver_act is None:
+    _print('error: "%s" unsupported' % ver_str, file=sys.stderr)
+    raise CloneFailure()
+
+  if ver_act < MIN_GIT_VERSION:
+    need = '.'.join(map(str, MIN_GIT_VERSION))
+    _print('fatal: git %s or later required' % need, file=sys.stderr)
+    raise CloneFailure()
+
+
+def NeedSetupGnuPG():
+  if not os.path.isdir(home_dot_repo):
+    return True
+
+  kv = os.path.join(home_dot_repo, 'keyring-version')
+  if not os.path.exists(kv):
+    return True
+
+  kv = open(kv).read()
+  if not kv:
+    return True
+
+  kv = tuple(map(int, kv.split('.')))
+  if kv < KEYRING_VERSION:
+    return True
+  return False
+
+
+def SetupGnuPG(quiet):
+  try:
+    os.mkdir(home_dot_repo)
+  except OSError as e:
+    if e.errno != errno.EEXIST:
+      _print('fatal: cannot make %s directory: %s'
+             % (home_dot_repo, e.strerror), file=sys.stderr)
+      sys.exit(1)
+
+  try:
+    os.mkdir(gpg_dir, stat.S_IRWXU)
+  except OSError as e:
+    if e.errno != errno.EEXIST:
+      _print('fatal: cannot make %s directory: %s' % (gpg_dir, e.strerror),
+             file=sys.stderr)
+      sys.exit(1)
+
+  env = os.environ.copy()
+  env['GNUPGHOME'] = gpg_dir.encode()
+
+  cmd = ['gpg', '--import']
+  try:
+    proc = subprocess.Popen(cmd,
+                            env = env,
+                            stdin = subprocess.PIPE)
+  except OSError as e:
+    if not quiet:
+      _print('warning: gpg (GnuPG) is not available.', file=sys.stderr)
+      _print('warning: Installing it is strongly encouraged.', file=sys.stderr)
+      _print(file=sys.stderr)
+    return False
+
+  proc.stdin.write(MAINTAINER_KEYS)
+  proc.stdin.close()
+
+  if proc.wait() != 0:
+    _print('fatal: registering repo maintainer keys failed', file=sys.stderr)
+    sys.exit(1)
+  _print()
+
+  fd = open(os.path.join(home_dot_repo, 'keyring-version'), 'w')
+  fd.write('.'.join(map(str, KEYRING_VERSION)) + '\n')
+  fd.close()
+  return True
+
+
+def _SetConfig(local, name, value):
+  """Set a git configuration option to the specified value.
+  """
+  cmd = [GIT, 'config', name, value]
+  if subprocess.Popen(cmd, cwd = local).wait() != 0:
+    raise CloneFailure()
+
+
+def _InitHttp():
+  handlers = []
+
+  mgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
+  try:
+    import netrc
+    n = netrc.netrc()
+    for host in n.hosts:
+      p = n.hosts[host]
+      mgr.add_password(p[1], 'http://%s/'  % host, p[0], p[2])
+      mgr.add_password(p[1], 'https://%s/' % host, p[0], p[2])
+  except:
+    pass
+  handlers.append(urllib.request.HTTPBasicAuthHandler(mgr))
+  handlers.append(urllib.request.HTTPDigestAuthHandler(mgr))
+
+  if 'http_proxy' in os.environ:
+    url = os.environ['http_proxy']
+    handlers.append(urllib.request.ProxyHandler({'http': url, 'https': url}))
+  if 'REPO_CURL_VERBOSE' in os.environ:
+    handlers.append(urllib.request.HTTPHandler(debuglevel=1))
+    handlers.append(urllib.request.HTTPSHandler(debuglevel=1))
+  urllib.request.install_opener(urllib.request.build_opener(*handlers))
+
+def _Fetch(url, local, src, quiet):
+  if not quiet:
+    _print('Get %s' % url, file=sys.stderr)
+
+  cmd = [GIT, 'fetch']
+  if quiet:
+    cmd.append('--quiet')
+    err = subprocess.PIPE
+  else:
+    err = None
+  cmd.append(src)
+  cmd.append('+refs/heads/*:refs/remotes/origin/*')
+  cmd.append('refs/tags/*:refs/tags/*')
+
+  proc = subprocess.Popen(cmd, cwd = local, stderr = err)
+  if err:
+    proc.stderr.read()
+    proc.stderr.close()
+  if proc.wait() != 0:
+    raise CloneFailure()
+
+def _DownloadBundle(url, local, quiet):
+  if not url.endswith('/'):
+    url += '/'
+  url += 'clone.bundle'
+
+  proc = subprocess.Popen(
+    [GIT, 'config', '--get-regexp', 'url.*.insteadof'],
+    cwd = local,
+    stdout = subprocess.PIPE)
+  for line in proc.stdout:
+    m = re.compile(r'^url\.(.*)\.insteadof (.*)$').match(line)
+    if m:
+      new_url = m.group(1)
+      old_url = m.group(2)
+      if url.startswith(old_url):
+        url = new_url + url[len(old_url):]
+        break
+  proc.stdout.close()
+  proc.wait()
+
+  if not url.startswith('http:') and not url.startswith('https:'):
+    return False
+
+  dest = open(os.path.join(local, '.git', 'clone.bundle'), 'w+b')
+  try:
+    try:
+      r = urllib.request.urlopen(url)
+    except urllib.error.HTTPError as e:
+      if e.code in [403, 404]:
+        return False
+      _print('fatal: Cannot get %s' % url, file=sys.stderr)
+      _print('fatal: HTTP error %s' % e.code, file=sys.stderr)
+      raise CloneFailure()
+    except urllib.error.URLError as e:
+      _print('fatal: Cannot get %s' % url, file=sys.stderr)
+      _print('fatal: error %s' % e.reason, file=sys.stderr)
+      raise CloneFailure()
+    try:
+      if not quiet:
+        _print('Get %s' % url, file=sys.stderr)
+      while True:
+        buf = r.read(8192)
+        if buf == '':
+          return True
+        dest.write(buf)
+    finally:
+      r.close()
+  finally:
+    dest.close()
+
+def _ImportBundle(local):
+  path = os.path.join(local, '.git', 'clone.bundle')
+  try:
+    _Fetch(local, local, path, True)
+  finally:
+    os.remove(path)
+
+def _Clone(url, local, quiet):
+  """Clones a git repository to a new subdirectory of repodir
+  """
+  try:
+    os.mkdir(local)
+  except OSError as e:
+    _print('fatal: cannot make %s directory: %s' % (local, e.strerror),
+           file=sys.stderr)
+    raise CloneFailure()
+
+  cmd = [GIT, 'init', '--quiet']
+  try:
+    proc = subprocess.Popen(cmd, cwd = local)
+  except OSError as e:
+    _print(file=sys.stderr)
+    _print("fatal: '%s' is not available" % GIT, file=sys.stderr)
+    _print('fatal: %s' % e, file=sys.stderr)
+    _print(file=sys.stderr)
+    _print('Please make sure %s is installed and in your path.' % GIT,
+          file=sys.stderr)
+    raise CloneFailure()
+  if proc.wait() != 0:
+    _print('fatal: could not create %s' % local, file=sys.stderr)
+    raise CloneFailure()
+
+  _InitHttp()
+  _SetConfig(local, 'remote.origin.url', url)
+  _SetConfig(local, 'remote.origin.fetch',
+                    '+refs/heads/*:refs/remotes/origin/*')
+  if _DownloadBundle(url, local, quiet):
+    _ImportBundle(local)
+  else:
+    _Fetch(url, local, 'origin', quiet)
+
+
+def _Verify(cwd, branch, quiet):
+  """Verify the branch has been signed by a tag.
+  """
+  cmd = [GIT, 'describe', 'origin/%s' % branch]
+  proc = subprocess.Popen(cmd,
+                          stdout=subprocess.PIPE,
+                          stderr=subprocess.PIPE,
+                          cwd = cwd)
+  cur = proc.stdout.read().strip()
+  proc.stdout.close()
+
+  proc.stderr.read()
+  proc.stderr.close()
+
+  if proc.wait() != 0 or not cur:
+    _print(file=sys.stderr)
+    _print("fatal: branch '%s' has not been signed" % branch, file=sys.stderr)
+    raise CloneFailure()
+
+  m = re.compile(r'^(.*)-[0-9]{1,}-g[0-9a-f]{1,}$').match(cur)
+  if m:
+    cur = m.group(1)
+    if not quiet:
+      _print(file=sys.stderr)
+      _print("info: Ignoring branch '%s'; using tagged release '%s'"
+            % (branch, cur), file=sys.stderr)
+      _print(file=sys.stderr)
+
+  env = os.environ.copy()
+  env['GNUPGHOME'] = gpg_dir.encode()
+
+  cmd = [GIT, 'tag', '-v', cur]
+  proc = subprocess.Popen(cmd,
+                          stdout = subprocess.PIPE,
+                          stderr = subprocess.PIPE,
+                          cwd = cwd,
+                          env = env)
+  out = proc.stdout.read()
+  proc.stdout.close()
+
+  err = proc.stderr.read()
+  proc.stderr.close()
+
+  if proc.wait() != 0:
+    _print(file=sys.stderr)
+    _print(out, file=sys.stderr)
+    _print(err, file=sys.stderr)
+    _print(file=sys.stderr)
+    raise CloneFailure()
+  return '%s^0' % cur
+
+
+def _Checkout(cwd, branch, rev, quiet):
+  """Checkout an upstream branch into the repository and track it.
+  """
+  cmd = [GIT, 'update-ref', 'refs/heads/default', rev]
+  if subprocess.Popen(cmd, cwd = cwd).wait() != 0:
+    raise CloneFailure()
+
+  _SetConfig(cwd, 'branch.default.remote', 'origin')
+  _SetConfig(cwd, 'branch.default.merge', 'refs/heads/%s' % branch)
+
+  cmd = [GIT, 'symbolic-ref', 'HEAD', 'refs/heads/default']
+  if subprocess.Popen(cmd, cwd = cwd).wait() != 0:
+    raise CloneFailure()
+
+  cmd = [GIT, 'read-tree', '--reset', '-u']
+  if not quiet:
+    cmd.append('-v')
+  cmd.append('HEAD')
+  if subprocess.Popen(cmd, cwd = cwd).wait() != 0:
+    raise CloneFailure()
+
+
+def _FindRepo():
+  """Look for a repo installation, starting at the current directory.
+  """
+  curdir = os.getcwd()
+  repo = None
+
+  olddir = None
+  while curdir != '/' \
+    and curdir != olddir \
+    and not repo:
+    repo = os.path.join(curdir, repodir, REPO_MAIN)
+    if not os.path.isfile(repo):
+      repo = None
+      olddir = curdir
+      curdir = os.path.dirname(curdir)
+  return (repo, os.path.join(curdir, repodir))
+
+
+class _Options:
+  help = False
+
+
+def _ParseArguments(args):
+  cmd = None
+  opt = _Options()
+  arg = []
+
+  for i in range(len(args)):
+    a = args[i]
+    if a == '-h' or a == '--help':
+      opt.help = True
+
+    elif not a.startswith('-'):
+      cmd = a
+      arg = args[i + 1:]
+      break
+  return cmd, opt, arg
+
+
+def _Usage():
+  _print(
+"""usage: repo COMMAND [ARGS]
+
+repo is not yet installed.  Use "repo init" to install it here.
+
+The most commonly used repo commands are:
+
+  init      Install repo in the current working directory
+  help      Display detailed help on a command
+
+For access to the full online help, install repo ("repo init").
+""", file=sys.stderr)
+  sys.exit(1)
+
+
+def _Help(args):
+  if args:
+    if args[0] == 'init':
+      init_optparse.print_help()
+      sys.exit(0)
+    else:
+      _print("error: '%s' is not a bootstrap command.\n"
+             '        For access to online help, install repo ("repo init").'
+             % args[0], file=sys.stderr)
+  else:
+    _Usage()
+  sys.exit(1)
+
+
+def _NotInstalled():
+  _print('error: repo is not installed.  Use "repo init" to install it here.',
+         file=sys.stderr)
+  sys.exit(1)
+
+
+def _NoCommands(cmd):
+  _print("""error: command '%s' requires repo to be installed first.
+         Use "repo init" to install it here.""" % cmd, file=sys.stderr)
+  sys.exit(1)
+
+
+def _RunSelf(wrapper_path):
+  my_dir = os.path.dirname(wrapper_path)
+  my_main = os.path.join(my_dir, 'main.py')
+  my_git = os.path.join(my_dir, '.git')
+
+  if os.path.isfile(my_main) and os.path.isdir(my_git):
+    for name in ['git_config.py',
+                 'project.py',
+                 'subcmds']:
+      if not os.path.exists(os.path.join(my_dir, name)):
+        return None, None
+    return my_main, my_git
+  return None, None
+
+
+def _SetDefaultsTo(gitdir):
+  global REPO_URL
+  global REPO_REV
+
+  REPO_URL = gitdir
+  proc = subprocess.Popen([GIT,
+                           '--git-dir=%s' % gitdir,
+                           'symbolic-ref',
+                           'HEAD'],
+                          stdout = subprocess.PIPE,
+                          stderr = subprocess.PIPE)
+  REPO_REV = proc.stdout.read().strip()
+  proc.stdout.close()
+
+  proc.stderr.read()
+  proc.stderr.close()
+
+  if proc.wait() != 0:
+    _print('fatal: %s has no current branch' % gitdir, file=sys.stderr)
+    sys.exit(1)
+
+
+def main(orig_args):
+  repo_main, rel_repo_dir = _FindRepo()
+  cmd, opt, args = _ParseArguments(orig_args)
+
+  wrapper_path = os.path.abspath(__file__)
+  my_main, my_git = _RunSelf(wrapper_path)
+
+  if not repo_main:
+    if opt.help:
+      _Usage()
+    if cmd == 'help':
+      _Help(args)
+    if not cmd:
+      _NotInstalled()
+    if cmd == 'init':
+      if my_git:
+        _SetDefaultsTo(my_git)
+      try:
+        _Init(args)
+      except CloneFailure:
+        for root, dirs, files in os.walk(repodir, topdown=False):
+          for name in files:
+            os.remove(os.path.join(root, name))
+          for name in dirs:
+            os.rmdir(os.path.join(root, name))
+        os.rmdir(repodir)
+        sys.exit(1)
+      repo_main, rel_repo_dir = _FindRepo()
+    else:
+      _NoCommands(cmd)
+
+  if cmd == 'sync' and NeedSetupGnuPG():
+    SetupGnuPG(False)
+
+  if my_main:
+    repo_main = my_main
+
+  ver_str = '.'.join(map(str, VERSION))
+  me = [sys.executable, repo_main,
+        '--repo-dir=%s' % rel_repo_dir,
+        '--wrapper-version=%s' % ver_str,
+        '--wrapper-path=%s' % wrapper_path,
+        '--']
+  me.extend(orig_args)
+  me.extend(extra_args)
+  try:
+    os.execv(sys.executable, me)
+  except OSError as e:
+    _print("fatal: unable to start %s" % repo_main, file=sys.stderr)
+    _print("fatal: %s" % e, file=sys.stderr)
+    sys.exit(148)
+
+
+if __name__ == '__main__':
+  main(sys.argv[1:])
diff --git a/bootstrap/scripts/__init__.py b/bootstrap/scripts/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/bootstrap/scripts/__init__.py
diff --git a/bootstrap/scripts/brillo.py b/bootstrap/scripts/brillo.py
new file mode 100644
index 0000000..1841a47
--- /dev/null
+++ b/bootstrap/scripts/brillo.py
@@ -0,0 +1,59 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Bootstrap wrapper for 'brillo' command.
+
+For most commands of the form "brillo XYZ", we reinvoke
+REPO_DIR/chromite/bin/brillo XYZ, after detecting REPO_DIR based on the CWD.
+
+For the "brillo sdk" command, we reinvoke "../bin/brillo sdk" from the current
+git repository. This allows the SDK command to be run, even if there is no repo
+checkout.
+"""
+
+from __future__ import print_function
+
+import os
+
+from chromite.lib import bootstrap_lib
+from chromite.lib import cros_build_lib
+from chromite.lib import git
+from chromite.lib import workspace_lib
+
+
def LocateBrilloCommand(args):
  """Figure out which 'brillo' binary should handle this invocation.

  Args:
    args: brillo command line arguments (without the program name).

  Returns:
    Path of the brillo wrapper to re-exec for these arguments.
  """
  bootstrap_path = bootstrap_lib.FindBootstrapPath(save_to_env=True)

  # 'brillo sdk' is special: it always runs from the git clone holding this
  # bootstrap script, so it works even without any repo checkout.
  if args and args[0] == 'sdk':
    if not bootstrap_path:
      cros_build_lib.Die(
          'You are bootstrapping chromite from a repo checkout.\n'
          'You must use a git clone. (brbug.com/580: link docs)')
    return os.path.join(bootstrap_path, 'bin', 'brillo')

  # Inside a workspace, delegate to the SDK bound to that workspace.
  workspace_path = workspace_lib.WorkspacePath()
  if workspace_path:
    sdk_path = bootstrap_lib.GetActiveSdkPath(bootstrap_path, workspace_path)
    if not sdk_path:
      cros_build_lib.Die(
          'The current workspace has no valid SDK.\n'
          'Please run "brillo sdk --update" (brbug.com/580: link docs)')
    return os.path.join(sdk_path, 'chromite', 'bin', 'brillo')

  # Otherwise fall back to the repo checkout containing the CWD, if any.
  repo_path = git.FindRepoCheckoutRoot(os.getcwd())
  if repo_path:
    return os.path.join(repo_path, 'chromite', 'bin', 'brillo')

  cros_build_lib.Die('Unable to detect which SDK you want to use.')
+
def main(args):
  """Resolve the real 'brillo' binary and replace this process with it."""
  cmd_path = LocateBrilloCommand(args)
  os.execv(cmd_path, [cmd_path] + args)
diff --git a/bootstrap/scripts/brillo_unittest b/bootstrap/scripts/brillo_unittest
new file mode 120000
index 0000000..ef3e37b
--- /dev/null
+++ b/bootstrap/scripts/brillo_unittest
@@ -0,0 +1 @@
+../../scripts/wrapper.py
\ No newline at end of file
diff --git a/bootstrap/scripts/brillo_unittest.py b/bootstrap/scripts/brillo_unittest.py
new file mode 100644
index 0000000..c4c5226
--- /dev/null
+++ b/bootstrap/scripts/brillo_unittest.py
@@ -0,0 +1,176 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Test the bootstrap brillo command."""
+
+from __future__ import print_function
+
+import mock
+import os
+
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_test_lib
+from chromite.lib import git
+
+from chromite.bootstrap.scripts import brillo
+
+
class TestBootstrapBrilloCmd(cros_test_lib.WorkspaceTestCase):
  """Tests for the bootstrap brillo command."""

  # NOTE(review): self.bootstrap_path, self.mock_bootstrap_path and
  # self.mock_workspace_path used below are not created in setUp; they
  # presumably come from WorkspaceTestCase's CreateBootstrap()/
  # CreateWorkspace() helpers -- confirm against cros_test_lib.

  def setUp(self):
    # Make certain we never exec anything.
    self.mock_exec = self.PatchObject(os, 'execv', autospec=True)

    self.mock_repo_root = self.PatchObject(
        git, 'FindRepoCheckoutRoot', autospec=True)

  def _verifyLocateBrilloCommand(self, expected):
    # A regular (non-sdk) command should resolve to |expected|.
    self.assertEqual(expected,
                     brillo.LocateBrilloCommand(['flash']))
    self.assertEqual(expected,
                     brillo.LocateBrilloCommand(['flash', '--help']))

  def _verifyLocateBrilloCommandSdkHandling(self, expected):
    # The special-cased 'sdk' command should resolve to |expected|.
    self.assertEqual(expected,
                     brillo.LocateBrilloCommand(['sdk']))
    self.assertEqual(expected,
                     brillo.LocateBrilloCommand(['sdk', '--help']))

  def _verifyLocateBrilloCommandFail(self):
    # A regular command should abort via cros_build_lib.Die.
    with self.assertRaises(cros_build_lib.DieSystemExit):
      brillo.LocateBrilloCommand(['flash'])

  def _verifyLocateBrilloCommandSdkFail(self):
    # The 'sdk' command should abort via cros_build_lib.Die.
    with self.assertRaises(cros_build_lib.DieSystemExit):
      brillo.LocateBrilloCommand(['sdk'])

  def testCommandLookupActiveWorkspace(self):
    """Regular commands use the workspace SDK; 'sdk' uses the bootstrap."""
    self.CreateBootstrap('1.2.3')
    self.CreateWorkspace('1.2.3')

    sdk_wrapper = os.path.join(
        self.bootstrap_path, 'sdk_checkouts/1.2.3/chromite/bin/brillo')
    bootstrap_wrapper = os.path.join(self.bootstrap_path, 'bin/brillo')

    # We are not inside a repo.
    self.mock_repo_root.return_value = None

    self._verifyLocateBrilloCommand(sdk_wrapper)
    self._verifyLocateBrilloCommandSdkHandling(bootstrap_wrapper)

    # We are inside a repo, shouldn't affect the result.
    self.mock_repo_root.return_value = '/repo'

    self._verifyLocateBrilloCommand(sdk_wrapper)
    self._verifyLocateBrilloCommandSdkHandling(bootstrap_wrapper)

  def testCommandLookupInactiveWorkspace(self):
    """Without an active SDK, regular commands fail but 'sdk' resolves."""
    self.CreateBootstrap()
    self.CreateWorkspace()
    self.mock_repo_root.return_value = None

    bootstrap_wrapper = os.path.join(self.bootstrap_path, 'bin/brillo')

    self._verifyLocateBrilloCommandFail()
    self._verifyLocateBrilloCommandSdkHandling(bootstrap_wrapper)

    # Having a repo root shouldn't affect the result.
    self.mock_repo_root.return_value = '/repo'

    self._verifyLocateBrilloCommandFail()
    self._verifyLocateBrilloCommandSdkHandling(bootstrap_wrapper)

  def testCommandLookupRepoFromBootstrap(self):
    """Outside a workspace, regular commands resolve to the repo checkout."""
    self.CreateBootstrap('1.2.3')
    self.CreateWorkspace()
    self.mock_workspace_path.return_value = None
    self.mock_repo_root.return_value = '/repo'

    bootstrap_wrapper = os.path.join(self.bootstrap_path, 'bin/brillo')
    repo_wrapper = '/repo/chromite/bin/brillo'

    self._verifyLocateBrilloCommand(repo_wrapper)
    self._verifyLocateBrilloCommandSdkHandling(bootstrap_wrapper)

  def testCommandLookupBootstrapOnly(self):
    """With no workspace or repo, only the 'sdk' command can resolve."""
    self.CreateBootstrap('1.2.3')
    self.CreateWorkspace()
    self.mock_workspace_path.return_value = None
    self.mock_repo_root.return_value = None

    bootstrap_wrapper = os.path.join(self.bootstrap_path, 'bin/brillo')

    self._verifyLocateBrilloCommandFail()
    self._verifyLocateBrilloCommandSdkHandling(bootstrap_wrapper)

  def testCommandLookupRepoOnly(self):
    """Without a bootstrap checkout, 'sdk' fails but repo commands work."""
    self.CreateBootstrap('1.2.3')
    self.CreateWorkspace()
    self.mock_bootstrap_path.return_value = None
    self.mock_workspace_path.return_value = None
    self.mock_repo_root.return_value = '/repo'

    repo_wrapper = '/repo/chromite/bin/brillo'

    self._verifyLocateBrilloCommand(repo_wrapper)
    self._verifyLocateBrilloCommandSdkFail()

  def testMainInActiveWorkspace(self):
    """main() execs the workspace SDK's brillo for regular commands."""
    self.CreateBootstrap('1.2.3')
    self.CreateWorkspace('1.2.3')
    self.mock_repo_root.return_value = None

    brillo.main(['flash', '--help'])

    expected_cmd = os.path.join(
        self.bootstrap_path, 'sdk_checkouts/1.2.3/chromite/bin/brillo')

    self.assertEqual(
        [mock.call(expected_cmd, [expected_cmd, 'flash', '--help'])],
        self.mock_exec.call_args_list)

  def testMainInRepo(self):
    """main() execs the repo checkout's brillo when outside a workspace."""
    self.CreateBootstrap('1.2.3')
    self.CreateWorkspace('1.2.3')
    self.mock_workspace_path.return_value = None
    self.mock_repo_root.return_value = '/repo'

    brillo.main(['flash', '--help'])

    expected_cmd = '/repo/chromite/bin/brillo'

    self.assertEqual(
        [mock.call(expected_cmd, [expected_cmd, 'flash', '--help'])],
        self.mock_exec.call_args_list)

  def testMainNoCmd(self):
    """main() dies without exec'ing when no brillo can be located."""
    self.CreateBootstrap('1.2.3')
    self.CreateWorkspace('1.2.3')
    self.mock_workspace_path.return_value = None
    self.mock_repo_root.return_value = None

    with self.assertRaises(cros_build_lib.DieSystemExit):
      brillo.main(['flash', '--help'])

    self.assertEqual([], self.mock_exec.call_args_list)

  def testMainSdkCmd(self):
    """main() execs the bootstrap checkout's brillo for the 'sdk' command."""
    self.CreateBootstrap('1.2.3')
    self.CreateWorkspace('1.2.3')
    self.mock_workspace_path.return_value = None
    self.mock_repo_root.return_value = None

    brillo.main(['sdk', '--help'])

    expected_cmd = os.path.join(self.bootstrap_path, 'bin/brillo')

    self.assertEqual(
        [mock.call(expected_cmd, [expected_cmd, 'sdk', '--help'])],
        self.mock_exec.call_args_list)
diff --git a/bootstrap/support/chromite_wrapper b/bootstrap/support/chromite_wrapper
new file mode 100755
index 0000000..43cb14b
--- /dev/null
+++ b/bootstrap/support/chromite_wrapper
@@ -0,0 +1,96 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Wrapper for chromite tools.
+
+The script is intended to be symlinked to any number of chromite tools. It
+attempts to find the path for chromite, and hands off to the right tool via
+exec if possible.
+
+It is intended to be used strictly outside of the chroot.
+
+If you're looking at a copy and want to know where the original looks at, look
+here:
+  http://git.chromium.org/gitweb/?p=chromite.git;a=blob;f=bin/chromite
+
+Since this script is _copied_, it should remain small and not use internal libs.
+
+"""
+
+import errno
+import os
+import sys
+
+# Due to historical reasons, and the fact depot_tools ToT is used by older
+# factory branches (lacking chromite script cleanups), note we have to
+# fallback to some odd import locations.  This is the only reason for the
+# fallback code- any/all new scripts symlinked to this script *must* exist
+# in chromite/bin/ .
+
+def _FindChromite(path):
+  """Find the chromite dir in a repo, gclient, or submodule checkout."""
+  path = os.path.abspath(path)
+  # Depending on the checkout type (whether repo chromeos or gclient chrome)
+  # Chromite lives in a different location.
+  roots = (
+    ('.repo', 'chromite/.git'),
+    ('.gclient', 'src/third_party/chromite/.git'),
+    ('src/.gitmodules', 'src/third_party/chromite/.git'),
+  )
+
+  while path != '/':
+    for root, chromite_git_dir in roots:
+      if all(os.path.exists(os.path.join(path, x))
+             for x in [root, chromite_git_dir]):
+        return os.path.dirname(os.path.join(path, chromite_git_dir))
+    path = os.path.dirname(path)
+  return None
+
+
+def _MissingErrorOut(target):
+  sys.stderr.write(
+"""ERROR: Couldn't find the chromite tool %s.
+
+Please change to a directory inside your Chromium OS source tree
+and retry.  If you need to setup a Chromium OS source tree, see
+  http://www.chromium.org/chromium-os/developer-guide
+""" % target)
+  return 127
+
+
def main():
  """Locate the requested chromite tool and hand control off to it.

  Returns:
    An exit code (127 when the tool cannot be found) or whatever the
    fallback module's main() returns; on success os.execv never returns.
  """
  chromite_dir = _FindChromite(os.getcwd())
  target = os.path.basename(sys.argv[0])
  if chromite_dir is None:
    return _MissingErrorOut(target)

  path = os.path.join(chromite_dir, 'bin', target)
  try:
    os.execv(path, [path] + sys.argv[1:])
  # 'except E, e' is Python-2-only syntax; 'as' works on 2.6+ and 3.x.
  except EnvironmentError as e:
    if e.errno not in (errno.ENOENT, errno.EPERM):
      raise

  # Reaching here means it's either a bad target, or we're working against
  # an old (pre 6be2efcf5bb575b03862113eec097c44d8d7f93e) revision of
  # chromite.  Fallback to trying to import it; this code works at least as
  # far back as branch 0.11.241.B; likely further.

  if target == 'cbuildbot':
    target = 'chromite.buildbot.cbuildbot'
  else:
    target = 'chromite.bin.%s' % (target,)

  # Adjust sys.path so the fallback import of our target can resolve.
  sys.path.insert(0, os.path.dirname(chromite_dir))

  try:
    module = __import__(target, fromlist=['main'])
  except ImportError:
    return _MissingErrorOut(target)
  return module.main()

if __name__ == '__main__':
  sys.exit(main())
diff --git a/cbuildbot/__init__.py b/cbuildbot/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/cbuildbot/__init__.py
diff --git a/cbuildbot/afdo.py b/cbuildbot/afdo.py
new file mode 100644
index 0000000..be22e73
--- /dev/null
+++ b/cbuildbot/afdo.py
@@ -0,0 +1,504 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing the various utilities to build Chrome with AFDO.
+
+For a description of AFDO see gcc.gnu.org/wiki/AutoFDO.
+"""
+
+from __future__ import print_function
+
+import datetime
+import os
+import re
+
+from chromite.cbuildbot import failures_lib
+from chromite.cbuildbot import constants
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import git
+from chromite.lib import gs
+from chromite.lib import osutils
+from chromite.lib import path_util
+from chromite.lib import timeout_util
+
+
# AFDO-specific constants.
# Chrome URL where AFDO data is stored.
AFDO_PROD_URL = 'gs://chromeos-prebuilt/afdo-job/canonicals/'
AFDO_TEST_URL = '%s/afdo-job/canonicals/' % constants.TRASH_BUCKET
# Base URL the templates below bake in; point it at AFDO_TEST_URL when
# testing (UpdateChromeEbuildAFDOFile checks for this).
AFDO_BASE_URL = AFDO_PROD_URL
AFDO_CHROOT_ROOT = os.path.join('%(build_root)s', constants.DEFAULT_CHROOT_DIR)
AFDO_LOCAL_DIR = os.path.join('%(root)s', 'tmp')
AFDO_BUILDROOT_LOCAL = AFDO_LOCAL_DIR % {'root': AFDO_CHROOT_ROOT}
# Templates below are expanded with %(package)s/%(arch)s/%(version)s or
# %(release)s dicts by the functions in this module.
CHROME_ARCH_VERSION = '%(package)s-%(arch)s-%(version)s'
CHROME_PERF_AFDO_FILE = '%s.perf.data' % CHROME_ARCH_VERSION
CHROME_PERF_AFDO_URL = '%s%s.bz2' % (AFDO_BASE_URL, CHROME_PERF_AFDO_FILE)
CHROME_AFDO_FILE = '%s.afdo' % CHROME_ARCH_VERSION
CHROME_AFDO_URL = '%s%s.bz2' % (AFDO_BASE_URL, CHROME_AFDO_FILE)
CHROME_ARCH_RELEASE = '%(package)s-%(arch)s-%(release)s'
LATEST_CHROME_AFDO_FILE = 'latest-%s.afdo' % CHROME_ARCH_RELEASE
LATEST_CHROME_AFDO_URL = AFDO_BASE_URL + LATEST_CHROME_AFDO_FILE
CHROME_DEBUG_BIN = os.path.join('%(root)s',
                                'build/%(board)s/usr/lib/debug',
                                'opt/google/chrome/chrome.debug')
CHROME_DEBUG_BIN_URL = '%s%s.debug.bz2' % (AFDO_BASE_URL, CHROME_ARCH_VERSION)

# Tool (inside the chroot) that converts 'perf' data to an AFDO profile.
AFDO_GENERATE_GCOV_TOOL = '/usr/bin/create_gcov'

# regex to find AFDO file for specific architecture within the ebuild file.
CHROME_EBUILD_AFDO_EXP = r'^(?P<bef>AFDO_FILE\["%s"\]=")(?P<name>.*)(?P<aft>")'
# and corresponding replacement string.
CHROME_EBUILD_AFDO_REPL = r'\g<bef>%s\g<aft>'

# How old can the AFDO data be? (in days).
AFDO_ALLOWED_STALE = 7

# TODO(llozano): Currently using sandybridge boards. We should move to
# a more modern platform.
# Set of boards that can generate the AFDO profile (can generate 'perf'
# data with LBR events).
AFDO_DATA_GENERATORS = ('butterfly', 'lumpy', 'parrot', 'stumpy')

# For a given architecture, which architecture is used to generate
# the AFDO profile. Some architectures are not able to generate their
# own profile.
AFDO_ARCH_GENERATORS = {'amd64': 'amd64',
                        'arm': 'amd64',
                        'x86': 'amd64'}

# NOTE(review): presumably the alert-email recipients for AFDO failures;
# no usage is visible in this chunk -- confirm.
AFDO_ALERT_RECIPIENTS = ['chromeos-toolchain@google.com']
+
+
class MissingAFDOData(failures_lib.StepFailure):
  """Raised when necessary AFDO profile/perf data is missing."""
+
+
class MissingAFDOMarkers(failures_lib.StepFailure):
  """Raised when the ebuild lacks the AFDO_FILE markers needed for patching."""
+
+
def CompressAFDOFile(to_compress, buildroot):
  """Compress a file used by the AFDO process.

  Args:
    to_compress: File to compress.
    buildroot: buildroot where to store the compressed data.

  Returns:
    Name of the compressed data file.
  """
  out_dir = AFDO_BUILDROOT_LOCAL % {'build_root': buildroot}
  compressed = '%s.bz2' % os.path.join(out_dir, os.path.basename(to_compress))
  cros_build_lib.CompressFile(to_compress, compressed)
  return compressed
+
+
def UncompressAFDOFile(to_decompress, buildroot):
  """Decompress file used by AFDO process.

  Args:
    to_decompress: File to decompress.
    buildroot: buildroot where to store the decompressed data.

  Returns:
    Path of the decompressed file inside the buildroot's tmp directory.
  """
  local_dir = AFDO_BUILDROOT_LOCAL % {'build_root': buildroot}
  basename = os.path.basename(to_decompress)
  # Strip the trailing compression extension (e.g. '.bz2') for the output name.
  dest_basename = basename.rsplit('.', 1)[0]
  dest = os.path.join(local_dir, dest_basename)
  cros_build_lib.UncompressFile(to_decompress, dest)
  return dest
+
+
def GSUploadIfNotPresent(gs_context, src, dest):
  """Upload a file to GS unless it already exists there.

  An already-present destination object is not an error; it only produces
  a warning and the existing object is left untouched.

  Note: GSContext.Copy(src, dest, version=0) would express this intent
  directly, but it does not seem to work for large files, hence the
  explicit Exists() check. See crbug.com/395858.

  Args:
    gs_context: GS context instance.
    src: File to copy.
    dest: Destination location.

  Returns:
    True if the file was uploaded, False if it was already present.
  """
  if not gs_context.Exists(dest):
    gs_context.Copy(src, dest, acl='public-read')
    return True

  logging.warning('File %s already in GS', dest)
  return False
+
+
def GetAFDOPerfDataURL(cpv, arch):
  """Build the GS URL of the AFDO 'perf' data file.

  Args:
    cpv: The portage_util.CPV object for chromeos-chrome.
    arch: architecture we're going to build Chrome for.

  Returns:
    URL of the location of the 'perf' data file.
  """
  # The perf data file name is keyed on the chrome version only; the test
  # case that produces it does not know the revision number.
  # TODO(llozano): perf data filename should include the revision number.
  chrome_version = cpv.version_no_rev.split('_')[0]
  return CHROME_PERF_AFDO_URL % {'package': cpv.package,
                                 'arch': arch,
                                 'version': chrome_version}
+
+
def CheckAFDOPerfData(cpv, arch, gs_context):
  """Check whether AFDO perf data exists for the given architecture.

  Args:
    cpv: The portage_util.CPV object for chromeos-chrome.
    arch: architecture we're going to build Chrome for.
    gs_context: GS context to retrieve data.

  Returns:
    True if AFDO perf data is available. False otherwise.
  """
  url = GetAFDOPerfDataURL(cpv, arch)
  found = gs_context.Exists(url)
  if found:
    logging.info('Found AFDO perf data at %s', url)
  else:
    logging.info('Could not find AFDO perf data at %s', url)
  return found
+
+
def WaitForAFDOPerfData(cpv, arch, buildroot, gs_context,
                        timeout=constants.AFDO_GENERATE_TIMEOUT):
  """Wait for AFDO perf data to show up (with an appropriate timeout).

  Wait for AFDO 'perf' data to show up in GS and copy it into a temp
  directory in the buildroot.

  Args:
    cpv: CPV object for Chrome.
    arch: architecture we're going to build Chrome for.
    buildroot: buildroot where AFDO data should be stored.
    gs_context: GS context to retrieve data.
    timeout: How long to wait total, in seconds.

  Returns:
    True if found the AFDO perf data before the timeout expired.
    False otherwise.
  """
  try:
    # Poll GS until the perf data appears or the timeout expires.
    timeout_util.WaitForReturnTrue(
        CheckAFDOPerfData,
        func_args=(cpv, arch, gs_context),
        timeout=timeout, period=constants.SLEEP_TIMEOUT)
  except timeout_util.TimeoutError:
    logging.info('Could not find AFDO perf data before timeout')
    return False

  url = GetAFDOPerfDataURL(cpv, arch)
  dest_dir = AFDO_BUILDROOT_LOCAL % {'build_root': buildroot}
  dest_path = os.path.join(dest_dir, url.rsplit('/', 1)[1])
  gs_context.Copy(url, dest_path)

  UncompressAFDOFile(dest_path, buildroot)
  logging.info('Retrieved AFDO perf data to %s', dest_path)
  return True
+
+
def PatchChromeEbuildAFDOFile(ebuild_file, arch_profiles):
  """Patch the Chrome ebuild with the dictionary of {arch: afdo_file} pairs.

  Args:
    ebuild_file: path of the ebuild file within the chroot.
    arch_profiles: {arch: afdo_file} pairs to put into the ebuild.

  Raises:
    MissingAFDOMarkers: if the ebuild lacks an AFDO marker for some arch.
  """
  original_ebuild = path_util.FromChrootPath(ebuild_file)
  modified_ebuild = '%s.new' % original_ebuild

  # Pre-compile one pattern/replacement pair per architecture and track
  # which markers we have actually seen in the ebuild.
  arch_patterns = {}
  arch_repls = {}
  arch_markers = {}
  for arch in arch_profiles:
    arch_patterns[arch] = re.compile(CHROME_EBUILD_AFDO_EXP % arch)
    arch_repls[arch] = CHROME_EBUILD_AFDO_REPL % arch_profiles[arch]
    arch_markers[arch] = False

  with open(original_ebuild, 'r') as original, \
       open(modified_ebuild, 'w') as modified:
    for line in original:
      for arch in arch_profiles:
        matched = arch_patterns[arch].match(line)
        if matched:
          arch_markers[arch] = True
          modified.write(arch_patterns[arch].sub(arch_repls[arch], line))
          break
      else:  # line without markers, just copy it.
        modified.write(line)

  # .items() works on both Python 2 and 3; iteritems() is Python-2-only.
  for arch, found in arch_markers.items():
    if not found:
      raise MissingAFDOMarkers('Chrome ebuild file does not have appropriate '
                               'AFDO markers for arch %s' % arch)

  os.rename(modified_ebuild, original_ebuild)
+
+
def UpdateChromeEbuildAFDOFile(board, arch_profiles):
  """Update chrome ebuild with the dictionary of {arch: afdo_file} pairs.

  Modifies the Chrome ebuild to set the appropriate AFDO file for each
  given architecture. Regenerates the associated Manifest file and
  commits the new ebuild and Manifest.

  Args:
    board: board we are building Chrome for.
    arch_profiles: {arch: afdo_file} pairs to put into the ebuild.
  """
  # Find the Chrome ebuild file. Board-specific wrappers are named like
  # 'equery-<board>' / 'ebuild-<board>'.
  equery_prog = 'equery'
  ebuild_prog = 'ebuild'
  if board:
    equery_prog += '-%s' % board
    ebuild_prog += '-%s' % board

  equery_cmd = [equery_prog, 'w', 'chromeos-chrome']
  ebuild_file = cros_build_lib.RunCommand(equery_cmd,
                                          enter_chroot=True,
                                          redirect_stdout=True).output.rstrip()

  # Patch the ebuild file with the names of the available afdo_files.
  PatchChromeEbuildAFDOFile(ebuild_file, arch_profiles)

  # Also patch the 9999 ebuild. This is necessary because the uprev
  # process starts from the 9999 ebuild file and then compares to the
  # current version to see if the uprev is really necessary. We don't
  # want the names of the available afdo_files to show as differences.
  # It also allows developers to do USE=afdo_use when using the 9999
  # ebuild.
  ebuild_9999 = os.path.join(os.path.dirname(ebuild_file),
                             'chromeos-chrome-9999.ebuild')
  PatchChromeEbuildAFDOFile(ebuild_9999, arch_profiles)

  # Regenerate the Manifest file.
  ebuild_gs_dir = None
  # If using the GS test location, pass this location to the
  # chrome ebuild.
  if AFDO_BASE_URL == AFDO_TEST_URL:
    ebuild_gs_dir = {'AFDO_GS_DIRECTORY': AFDO_TEST_URL}
  gen_manifest_cmd = [ebuild_prog, ebuild_file, 'manifest', '--force']
  cros_build_lib.RunCommand(gen_manifest_cmd, enter_chroot=True,
                            extra_env=ebuild_gs_dir, print_cmd=True)

  ebuild_dir = path_util.FromChrootPath(os.path.dirname(ebuild_file))
  git.RunGit(ebuild_dir, ['add', 'Manifest'])

  # Check if anything changed compared to the previous version.
  mod_files = ['Manifest', os.path.basename(ebuild_file),
               os.path.basename(ebuild_9999)]
  modifications = git.RunGit(ebuild_dir,
                             ['status', '--porcelain', '--'] + mod_files,
                             capture_output=True, print_cmd=True).output
  if not modifications:
    logging.info('AFDO info for the Chrome ebuild did not change. '
                 'Nothing to commit')
    return

  # If there are changes to ebuild or Manifest, commit them.
  commit_msg = ('"Set {arch: afdo_file} pairs %s and updated Manifest"'
                % arch_profiles)
  git.RunGit(ebuild_dir,
             ['commit', '-m', commit_msg, '--'] + mod_files,
             print_cmd=True)
+
+
def VerifyLatestAFDOFile(afdo_release_spec, buildroot, gs_context):
  """Verify that the latest AFDO profile for a release is suitable.

  Find the latest AFDO profile file for a particular release and check
  that it is not too stale. The latest AFDO profile name for a release
  can be found in a file in GS under the name
  latest-chrome-<arch>-<release>.afdo.

  Args:
    afdo_release_spec: architecture and release to find the latest AFDO
        profile for.
    buildroot: buildroot where AFDO data should be stored.
    gs_context: GS context to retrieve data.

  Returns:
    The name of the AFDO profile file if a suitable one was found.
    None otherwise.
  """
  latest_afdo_url = LATEST_CHROME_AFDO_URL % afdo_release_spec

  # Check if latest-chrome-<arch>-<release>.afdo exists.
  try:
    latest_detail = gs_context.List(latest_afdo_url, details=True)
  except gs.GSNoSuchKey:
    # Use lazy %-args (consistent with the rest of this module) rather than
    # eager string interpolation.
    logging.info('Could not find latest AFDO info file %s', latest_afdo_url)
    return None

  # Verify the AFDO profile file is not too stale.
  mod_date = latest_detail[0].creation_time
  curr_date = datetime.datetime.now()
  allowed_stale_days = datetime.timedelta(days=AFDO_ALLOWED_STALE)
  if (curr_date - mod_date) > allowed_stale_days:
    logging.info('Found latest AFDO info file %s but it is too old',
                 latest_afdo_url)
    return None

  # Then get the name of the latest valid AFDO profile file.
  local_dir = AFDO_BUILDROOT_LOCAL % {'build_root': buildroot}
  latest_afdo_file = LATEST_CHROME_AFDO_FILE % afdo_release_spec
  latest_afdo_path = os.path.join(local_dir, latest_afdo_file)
  gs_context.Copy(latest_afdo_url, latest_afdo_path)

  return osutils.ReadFile(latest_afdo_path).strip()
+
+
def GetLatestAFDOFile(cpv, arch, buildroot, gs_context):
  """Try to find the latest suitable AFDO profile file.

  Looks for the newest AFDO profile generated for the current release and
  architecture; if none exists, checks the previous release as well (which
  covers the window right after branching).

  Args:
    cpv: cpv object for Chrome.
    arch: architecture for which we are looking for AFDO profile.
    buildroot: buildroot where AFDO data should be stored.
    gs_context: GS context to retrieve data.

  Returns:
    Name of latest suitable AFDO profile file if one is found.
    None otherwise.
  """
  generator_arch = AFDO_ARCH_GENERATORS[arch]
  current_release = cpv.version.split('.')[0]

  def _ReleaseSpec(release):
    # Spec dict used to expand the latest-profile URL templates.
    return {'package': cpv.package,
            'arch': generator_arch,
            'release': release}

  afdo_file = VerifyLatestAFDOFile(_ReleaseSpec(current_release), buildroot,
                                   gs_context)
  if afdo_file:
    return afdo_file

  # Nothing suitable for the current release; fall back to the previous one.
  previous_release = str(int(current_release) - 1)
  return VerifyLatestAFDOFile(_ReleaseSpec(previous_release), buildroot,
                              gs_context)
+
+
def GenerateAFDOData(cpv, arch, board, buildroot, gs_context):
  """Generate AFDO profile data from 'perf' data.

  Given the 'perf' profile, generate an AFDO profile using create_gcov.
  It also creates a latest-chrome-<arch>-<release>.afdo file pointing
  to the generated AFDO profile.
  Uploads the generated data to GS for retrieval by the chrome ebuild
  file when doing an 'afdo_use' build.
  It is possible the generated data has previously been uploaded to GS
  in which case this routine will not upload the data again. Uploading
  again may cause verification failures for the ebuild file referencing
  the previous contents of the data.

  Args:
    cpv: cpv object for Chrome.
    arch: architecture for which we are looking for AFDO profile.
    board: board we are building for.
    buildroot: buildroot where AFDO data should be stored.
    gs_context: GS context to retrieve/store data.

  Returns:
    Name of the AFDO profile file generated if successful.
  """
  CHROME_UNSTRIPPED_NAME = 'chrome.unstripped'

  version_number = cpv.version
  afdo_spec = {'package': cpv.package,
               'arch': arch,
               'version': version_number}
  # Paths both as seen from outside ('chroot_root'-relative) and inside
  # (''-rooted) the chroot, since create_gcov runs inside the chroot.
  chroot_root = AFDO_CHROOT_ROOT % {'build_root': buildroot}
  local_dir = AFDO_LOCAL_DIR % {'root': chroot_root}
  in_chroot_local_dir = AFDO_LOCAL_DIR % {'root': ''}

  # Upload compressed chrome debug binary to GS for triaging purposes.
  # TODO(llozano): This simplifies things in case of need of triaging
  # problems but is it really necessary?
  debug_bin = CHROME_DEBUG_BIN % {'root': chroot_root,
                                  'board': board}
  comp_debug_bin_path = CompressAFDOFile(debug_bin, buildroot)
  GSUploadIfNotPresent(gs_context, comp_debug_bin_path,
                       CHROME_DEBUG_BIN_URL % afdo_spec)

  # create_gcov demands the name of the profiled binary exactly matches
  # the name of the unstripped binary or it is named 'chrome.unstripped'.
  # So create a symbolic link with the appropriate name.
  local_debug_sym = os.path.join(local_dir, CHROME_UNSTRIPPED_NAME)
  in_chroot_debug_bin = CHROME_DEBUG_BIN % {'root': '', 'board': board}
  osutils.SafeUnlink(local_debug_sym)
  os.symlink(in_chroot_debug_bin, local_debug_sym)

  # Call create_gcov tool to generated AFDO profile from 'perf' profile
  # and upload it to GS. Need to call from within chroot since this tool
  # was built inside chroot.
  debug_sym = os.path.join(in_chroot_local_dir, CHROME_UNSTRIPPED_NAME)
  # The name of the 'perf' file is based only on the version of chrome. The
  # revision number is not included.
  afdo_spec_no_rev = {'package': cpv.package,
                      'arch': arch,
                      'version': cpv.version_no_rev.split('_')[0]}
  perf_afdo_file = CHROME_PERF_AFDO_FILE % afdo_spec_no_rev
  perf_afdo_path = os.path.join(in_chroot_local_dir, perf_afdo_file)
  afdo_file = CHROME_AFDO_FILE % afdo_spec
  afdo_path = os.path.join(in_chroot_local_dir, afdo_file)
  afdo_cmd = [AFDO_GENERATE_GCOV_TOOL,
              '--binary=%s' % debug_sym,
              '--profile=%s' % perf_afdo_path,
              '--gcov=%s' % afdo_path]
  cros_build_lib.RunCommand(afdo_cmd, enter_chroot=True, capture_output=True,
                            print_cmd=True)

  afdo_local_path = os.path.join(local_dir, afdo_file)
  comp_afdo_path = CompressAFDOFile(afdo_local_path, buildroot)
  uploaded_afdo_file = GSUploadIfNotPresent(gs_context, comp_afdo_path,
                                            CHROME_AFDO_URL % afdo_spec)

  if uploaded_afdo_file:
    # Create latest-chrome-<arch>-<release>.afdo pointing to the name
    # of the AFDO profile file and upload to GS.
    current_release = version_number.split('.')[0]
    afdo_release_spec = {'package': cpv.package,
                         'arch': arch,
                         'release': current_release}
    latest_afdo_file = LATEST_CHROME_AFDO_FILE % afdo_release_spec
    latest_afdo_path = os.path.join(local_dir, latest_afdo_file)
    osutils.WriteFile(latest_afdo_path, afdo_file)
    gs_context.Copy(latest_afdo_path,
                    LATEST_CHROME_AFDO_URL % afdo_release_spec,
                    acl='public-read')

  return afdo_file
+
+
def CanGenerateAFDOData(board):
  """Return True if this board can generate its own AFDO 'perf' data."""
  return board in AFDO_DATA_GENERATORS
diff --git a/cbuildbot/archive_lib.py b/cbuildbot/archive_lib.py
new file mode 100644
index 0000000..a4cef35
--- /dev/null
+++ b/cbuildbot/archive_lib.py
@@ -0,0 +1,184 @@
+# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module with utilities for archiving functionality."""
+
+from __future__ import print_function
+
+import os
+
+from chromite.cbuildbot import commands
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import constants
+
+from chromite.lib import cros_logging as logging
+from chromite.lib import gs
+from chromite.lib import osutils
+
+
+def GetBaseUploadURI(config, archive_base=None, bot_id=None,
+                     remote_trybot=False):
+  """Get the base URL where artifacts from this builder are uploaded.
+
+  Each build run stores its artifacts in a subdirectory of the base URI.
+  We also have LATEST files under the base URI which help point to the
+  latest build available for a given builder.
+
+  Args:
+    config: The build config to examine.
+    archive_base: Optional. The root URL under which objects from all
+      builders are uploaded. If not specified, we use the default archive
+      bucket.
+    bot_id: The bot ID to archive files under.
+    remote_trybot: Whether this is a remote trybot run. This is used to
+      make sure that uploads from remote trybot runs do not conflict with
+      uploads from production builders.
+
+  Returns:
+    Google Storage URI (i.e. 'gs://...') under which all archived files
+      should be uploaded.  In other words, a path like a directory, even
+      though GS has no real directories.
+  """
+  if not bot_id:
+    bot_id = config.GetBotId(remote_trybot=remote_trybot)
+
+  if archive_base:
+    return '%s/%s' % (archive_base, bot_id)
+  elif remote_trybot or config.gs_path == config_lib.GS_PATH_DEFAULT:
+    return '%s/%s' % (constants.DEFAULT_ARCHIVE_BUCKET, bot_id)
+  else:
+    return config.gs_path
+
+
+def GetUploadACL(config):
+  """Get the ACL we should use to upload artifacts for a given config."""
+  if config.internal:
+    # Use the bucket default ACL.
+    return None
+
+  return 'public-read'
+
+
+class Archive(object):
+  """Class to represent the archive for one builder run.
+
+  An Archive object is a read-only object with attributes and methods useful
+  for archive purposes.  Most of the attributes are supported as properties
+  because they depend on the ChromeOS version and if they are calculated too
+  soon (i.e. before the sync stage) they will raise an exception.
+
+  Attributes:
+    archive_path: The full local path where output from this builder is stored.
+    download_url: The URL where we can download artifacts.
+    upload_url: The Google Storage location where we should upload artifacts.
+    version: The ChromeOS version for this archive.
+  """
+
+  _BUILDBOT_ARCHIVE = 'buildbot_archive'
+  _TRYBOT_ARCHIVE = 'trybot_archive'
+
+  def __init__(self, bot_id, version_getter, options, config):
+    """Initialize.
+
+    Args:
+      bot_id: The bot id associated with this archive.
+      version_getter: Functor that should return the ChromeOS version for
+        this run when called, if the version is known.  Typically, this
+        is BuilderRun.GetVersion.
+      options: The command options object for this run.
+      config: The build config for this run.
+    """
+    self._options = options
+    self._config = config
+    self._version_getter = version_getter
+    self._version = None
+
+    self.bot_id = bot_id
+
+  @property
+  def version(self):
+    if self._version is None:
+      self._version = self._version_getter()
+
+    return self._version
+
+  @property
+  def archive_path(self):
+    return os.path.join(self.GetLocalArchiveRoot(), self.bot_id, self.version)
+
+  @property
+  def upload_url(self):
+    base_upload_url = GetBaseUploadURI(
+        self._config,
+        archive_base=self._options.archive_base,
+        bot_id=self.bot_id,
+        remote_trybot=self._options.remote_trybot)
+    return '%s/%s' % (base_upload_url, self.version)
+
+  @property
+  def upload_acl(self):
+    """Get the ACL we should use to upload artifacts for a given config."""
+    return GetUploadACL(self._config)
+
+  @property
+  def download_url(self):
+    if self._options.buildbot or self._options.remote_trybot:
+      # Translate the gs:// URI to the URL for downloading the same files.
+      return self.upload_url.replace('gs://', gs.PRIVATE_BASE_HTTPS_URL)
+    else:
+      return self.archive_path
+
+  def GetLocalArchiveRoot(self, trybot=None):
+    """Return the location on disk where archive images are kept."""
+    buildroot = os.path.abspath(self._options.buildroot)
+
+    if trybot is None:
+      trybot = not self._options.buildbot or self._options.debug
+
+    archive_base = self._TRYBOT_ARCHIVE if trybot else self._BUILDBOT_ARCHIVE
+    return os.path.join(buildroot, archive_base)
+
+  def SetupArchivePath(self):
+    """Create a fresh directory for archiving a build."""
+    logging.info('Preparing local archive directory at "%s".',
+                 self.archive_path)
+    if self._options.buildbot:
+      # Buildbot: Clear out any leftover build artifacts, if present, for
+      # this particular run.  The Clean stage is responsible for trimming
+      # back the number of archive paths to the last X runs.
+      osutils.RmDir(self.archive_path, ignore_missing=True)
+    else:
+      # Clear the list of uploaded files if it exists.  In practice, the Clean
+      # stage deletes everything in the archive root, so this may not be
+      # doing anything at all.
+      osutils.SafeUnlink(os.path.join(self.archive_path,
+                                      commands.UPLOADED_LIST_FILENAME))
+
+    osutils.SafeMakedirs(self.archive_path)
+
+  def UpdateLatestMarkers(self, manifest_branch, debug, upload_urls=None):
+    """Update the LATEST markers in GS archive area.
+
+    Args:
+      manifest_branch: The name of the branch in the manifest for this run.
+      debug: Boolean debug value for this run.
+      upload_urls: Google storage urls to upload the Latest Markers to.
+    """
+    if not upload_urls:
+      upload_urls = [self.upload_url]
+    # self.version will be one of these forms, shown through examples:
+    # R35-1234.5.6 or R35-1234.5.6-b123.  In either case, we want "1234.5.6".
+    version_marker = self.version.split('-')[1]
+
+    filenames = ('LATEST-%s' % manifest_branch,
+                 'LATEST-%s' % version_marker)
+    base_archive_path = os.path.dirname(self.archive_path)
+    base_upload_urls = [os.path.dirname(url) for url in upload_urls]
+    for base_upload_url in base_upload_urls:
+      for filename in filenames:
+        latest_path = os.path.join(base_archive_path, filename)
+        osutils.WriteFile(latest_path, self.version, mode='w')
+        commands.UploadArchivedFile(
+            base_archive_path, [base_upload_url], filename,
+            debug, acl=self.upload_acl)
diff --git a/cbuildbot/archive_lib_unittest b/cbuildbot/archive_lib_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/cbuildbot/archive_lib_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/cbuildbot/archive_lib_unittest.py b/cbuildbot/archive_lib_unittest.py
new file mode 100644
index 0000000..96dc2c7
--- /dev/null
+++ b/cbuildbot/archive_lib_unittest.py
@@ -0,0 +1,221 @@
+# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Test the archive_lib module."""
+
+from __future__ import print_function
+
+import mock
+
+from chromite.cbuildbot import archive_lib
+from chromite.cbuildbot import cbuildbot_run
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import config_lib_unittest
+from chromite.lib import cros_test_lib
+from chromite.lib import parallel_unittest
+
+
+DEFAULT_ARCHIVE_PREFIX = 'bogus_bucket/TheArchiveBase'
+DEFAULT_ARCHIVE_BASE = 'gs://%s' % DEFAULT_ARCHIVE_PREFIX
+DEFAULT_BUILDROOT = '/tmp/foo/bar/buildroot'
+DEFAULT_BUILDNUMBER = 12345
+DEFAULT_BRANCH = 'TheBranch'
+DEFAULT_CHROME_BRANCH = 'TheChromeBranch'
+DEFAULT_VERSION_STRING = 'TheVersionString'
+DEFAULT_BOARD = 'TheBoard'
+DEFAULT_BOT_NAME = 'TheCoolBot'
+
+# Access to protected member.
+# pylint: disable=W0212
+
+DEFAULT_OPTIONS = cros_test_lib.EasyAttr(
+    archive_base=DEFAULT_ARCHIVE_BASE,
+    buildroot=DEFAULT_BUILDROOT,
+    buildnumber=DEFAULT_BUILDNUMBER,
+    buildbot=True,
+    branch=DEFAULT_BRANCH,
+    remote_trybot=False,
+    debug=False,
+)
+DEFAULT_CONFIG = config_lib.BuildConfig(
+    name=DEFAULT_BOT_NAME,
+    master=True,
+    boards=[DEFAULT_BOARD],
+    child_configs=[config_lib.BuildConfig(name='foo'),
+                   config_lib.BuildConfig(name='bar'),
+                  ],
+)
+
+
+def _ExtendDefaultOptions(**kwargs):
+  """Extend DEFAULT_OPTIONS with keys/values in kwargs."""
+  options_kwargs = DEFAULT_OPTIONS.copy()
+  options_kwargs.update(kwargs)
+  return cros_test_lib.EasyAttr(**options_kwargs)
+
+
+def _ExtendDefaultConfig(**kwargs):
+  """Extend DEFAULT_CONFIG with keys/values in kwargs."""
+  config_kwargs = DEFAULT_CONFIG.copy()
+  config_kwargs.update(kwargs)
+  return config_lib.BuildConfig(**config_kwargs)
+
+
+def _NewBuilderRun(options=None, config=None):
+  """Create a BuilderRun object from options and config values.
+
+  Args:
+    options: Specify options or default to DEFAULT_OPTIONS.
+    config: Specify build config or default to DEFAULT_CONFIG.
+
+  Returns:
+    BuilderRun object.
+  """
+  manager = parallel_unittest.FakeMultiprocessManager()
+  options = options or DEFAULT_OPTIONS
+  config = config or DEFAULT_CONFIG
+  site_config = config_lib_unittest.MockSiteConfig()
+  site_config[config.name] = config
+
+  return cbuildbot_run.BuilderRun(options, site_config, config, manager)
+
+
+class GetBaseUploadURITest(cros_test_lib.TestCase):
+  """Test the GetBaseUploadURI function."""
+
+  ARCHIVE_BASE = '/tmp/the/archive/base'
+  BOT_ID = 'TheNewBotId'
+
+  def setUp(self):
+    self.cfg = DEFAULT_CONFIG
+
+  def _GetBaseUploadURI(self, *args, **kwargs):
+    """Call archive_lib.GetBaseUploadURI on self.cfg with the given args."""
+    return archive_lib.GetBaseUploadURI(self.cfg, *args, **kwargs)
+
+  def testArchiveBaseRemoteTrybotFalse(self):
+    expected_result = '%s/%s' % (self.ARCHIVE_BASE, DEFAULT_BOT_NAME)
+    result = self._GetBaseUploadURI(archive_base=self.ARCHIVE_BASE,
+                                    remote_trybot=False)
+    self.assertEqual(expected_result, result)
+
+  def testArchiveBaseRemoteTrybotTrue(self):
+    expected_result = '%s/trybot-%s' % (self.ARCHIVE_BASE, DEFAULT_BOT_NAME)
+    result = self._GetBaseUploadURI(archive_base=self.ARCHIVE_BASE,
+                                    remote_trybot=True)
+    self.assertEqual(expected_result, result)
+
+  def testArchiveBaseBotIdRemoteTrybotFalse(self):
+    expected_result = '%s/%s' % (self.ARCHIVE_BASE, self.BOT_ID)
+    result = self._GetBaseUploadURI(archive_base=self.ARCHIVE_BASE,
+                                    bot_id=self.BOT_ID, remote_trybot=False)
+    self.assertEqual(expected_result, result)
+
+  def testArchiveBaseBotIdRemoteTrybotTrue(self):
+    expected_result = '%s/%s' % (self.ARCHIVE_BASE, self.BOT_ID)
+    result = self._GetBaseUploadURI(archive_base=self.ARCHIVE_BASE,
+                                    bot_id=self.BOT_ID, remote_trybot=True)
+    self.assertEqual(expected_result, result)
+
+  def testRemoteTrybotTrue(self):
+    """Test GetBaseUploadURI with no archive base but remote_trybot is True."""
+    expected_result = ('%s/trybot-%s' %
+                       (archive_lib.constants.DEFAULT_ARCHIVE_BUCKET,
+                        DEFAULT_BOT_NAME))
+    result = self._GetBaseUploadURI(remote_trybot=True)
+    self.assertEqual(expected_result, result)
+
+  def testBotIdRemoteTrybotTrue(self):
+    expected_result = ('%s/%s' %
+                       (archive_lib.constants.DEFAULT_ARCHIVE_BUCKET,
+                        self.BOT_ID))
+    result = self._GetBaseUploadURI(bot_id=self.BOT_ID, remote_trybot=True)
+    self.assertEqual(expected_result, result)
+
+  def testDefaultGSPathRemoteTrybotFalse(self):
+    """Test GetBaseUploadURI with default gs_path value in config."""
+    self.cfg = _ExtendDefaultConfig(gs_path=config_lib.GS_PATH_DEFAULT)
+
+    # Test without bot_id.
+    expected_result = ('%s/%s' %
+                       (archive_lib.constants.DEFAULT_ARCHIVE_BUCKET,
+                        DEFAULT_BOT_NAME))
+    result = self._GetBaseUploadURI(remote_trybot=False)
+    self.assertEqual(expected_result, result)
+
+    # Test with bot_id.
+    expected_result = ('%s/%s' %
+                       (archive_lib.constants.DEFAULT_ARCHIVE_BUCKET,
+                        self.BOT_ID))
+    result = self._GetBaseUploadURI(bot_id=self.BOT_ID, remote_trybot=False)
+    self.assertEqual(expected_result, result)
+
+  def testOverrideGSPath(self):
+    """Test GetBaseUploadURI with an overridden gs_path value in config."""
+    self.cfg = _ExtendDefaultConfig(gs_path='gs://funkytown/foo/bar')
+
+    # Test without bot_id.
+    expected_result = self.cfg.gs_path
+    result = self._GetBaseUploadURI(remote_trybot=False)
+    self.assertEqual(expected_result, result)
+
+    # Test with bot_id.
+    expected_result = self.cfg.gs_path
+    result = self._GetBaseUploadURI(bot_id=self.BOT_ID, remote_trybot=False)
+    self.assertEqual(expected_result, result)
+
+
+class ArchiveTest(cros_test_lib.TestCase):
+  """Test the Archive class."""
+  _VERSION = '6543.2.1'
+
+  def _GetAttributeValue(self, attr, options=None, config=None):
+    with mock.patch.object(cbuildbot_run._BuilderRunBase, 'GetVersion') as m:
+      m.return_value = self._VERSION
+
+      run = _NewBuilderRun(options, config)
+      return getattr(run.GetArchive(), attr)
+
+  def testVersion(self):
+    value = self._GetAttributeValue('version')
+    self.assertEqual(self._VERSION, value)
+
+  def testVersionNotReady(self):
+    run = _NewBuilderRun()
+    self.assertRaises(AttributeError, getattr, run, 'version')
+
+  def testArchivePathTrybot(self):
+    options = _ExtendDefaultOptions(buildbot=False)
+    value = self._GetAttributeValue('archive_path', options=options)
+    expected_value = ('%s/%s/%s/%s' %
+                      (DEFAULT_BUILDROOT,
+                       archive_lib.Archive._TRYBOT_ARCHIVE,
+                       DEFAULT_BOT_NAME,
+                       self._VERSION))
+    self.assertEqual(expected_value, value)
+
+  def testArchivePathBuildbot(self):
+    value = self._GetAttributeValue('archive_path')
+    expected_value = ('%s/%s/%s/%s' %
+                      (DEFAULT_BUILDROOT,
+                       archive_lib.Archive._BUILDBOT_ARCHIVE,
+                       DEFAULT_BOT_NAME,
+                       self._VERSION))
+    self.assertEqual(expected_value, value)
+
+  def testUploadUri(self):
+    value = self._GetAttributeValue('upload_url')
+    expected_value = '%s/%s/%s' % (DEFAULT_ARCHIVE_BASE,
+                                   DEFAULT_BOT_NAME,
+                                   self._VERSION)
+    self.assertEqual(expected_value, value)
+
+  def testDownloadURLBuildbot(self):
+    value = self._GetAttributeValue('download_url')
+    expected_value = ('%s%s/%s/%s' %
+                      (archive_lib.gs.PRIVATE_BASE_HTTPS_URL,
+                       DEFAULT_ARCHIVE_PREFIX,
+                       DEFAULT_BOT_NAME,
+                       self._VERSION))
+    self.assertEqual(expected_value, value)
diff --git a/cbuildbot/autotest_rpc_errors.py b/cbuildbot/autotest_rpc_errors.py
new file mode 100644
index 0000000..816df9f
--- /dev/null
+++ b/cbuildbot/autotest_rpc_errors.py
@@ -0,0 +1,23 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Error codes used for the Autotest RPC Client, Proxy, and Server.
+
+This is a copy of scripts/slave-internal/autotest_rpc/autotest_rpc_errors.py
+from https://chrome-internal.googlesource.com/chrome/tools/build.
+"""
+
+PROXY_CANNOT_SEND_REQUEST = 11
+PROXY_CONNECTION_LOST = 12
+PROXY_TIMED_OUT = 13
+
+SERVER_NO_COMMAND = 21
+SERVER_NO_ARGUMENTS = 22
+SERVER_UNKNOWN_COMMAND = 23
+SERVER_BAD_ARGUMENT_COUNT = 24
+
+CLIENT_CANNOT_CONNECT = 31
+CLIENT_HTTP_CODE = 32
+CLIENT_EMPTY_RESPONSE = 33
+CLIENT_NO_RETURN_CODE = 34
diff --git a/cbuildbot/binhost.py b/cbuildbot/binhost.py
new file mode 100644
index 0000000..a8ef86c
--- /dev/null
+++ b/cbuildbot/binhost.py
@@ -0,0 +1,321 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utility functions for calculating compatible binhosts."""
+
+from __future__ import print_function
+
+import collections
+import json
+import os
+import tempfile
+
+from chromite.cbuildbot import constants
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import parallel
+
+
+# A unique identifier for looking up CompatIds by board/useflags.
+_BoardKey = collections.namedtuple('_BoardKey', ['board', 'useflags'])
+
+
+def BoardKey(board, useflags):
+  """Create a new _BoardKey object.
+
+  Args:
+    board: The board associated with this config.
+    useflags: A sequence of extra useflags associated with this config.
+  """
+  return _BoardKey(board, tuple(useflags))
+
+
+def GetBoardKey(config, board=None):
+  """Get the BoardKey associated with a given config.
+
+  Args:
+    config: A config_lib.BuildConfig object.
+    board: Board to use. Defaults to the first board in the config.
+      Optional if len(config.boards) == 1.
+  """
+  if board is None:
+    assert len(config.boards) == 1
+    board = config.boards[0]
+  else:
+    assert board in config.boards
+  return BoardKey(board, config.useflags)
+
+
+def GetAllImportantBoardKeys(site_config):
+  """Get a list of all board keys used in a top-level config.
+
+  Args:
+    site_config: A config_lib.SiteConfig instance.
+  """
+  boards = set()
+  for config in site_config.values():
+    if config.important:
+      for board in config.boards:
+        boards.add(GetBoardKey(config, board))
+  return boards
+
+
+def GetChromePrebuiltConfigs(site_config):
+  """Get a mapping of the boards used in the Chrome PFQ.
+
+  Args:
+    site_config: A config_lib.SiteConfig instance.
+
+  Returns:
+    A dict mapping BoardKey objects to configs.
+  """
+  boards = {}
+  master_chromium_pfq = site_config['master-chromium-pfq']
+  for config in site_config.GetSlavesForMaster(master_chromium_pfq):
+    if config.prebuilts:
+      for board in config.boards:
+        boards[GetBoardKey(config, board)] = config
+  return boards
+
+
+# A tuple of dicts describing our Chrome PFQs.
+# by_compat_id: A dict mapping CompatIds to sets of BoardKey objects.
+# by_arch_useflags: A dict mapping (arch, useflags) tuples to sets of
+#     BoardKey objects.
+_PrebuiltMapping = collections.namedtuple(
+    '_PrebuiltMapping', ['by_compat_id', 'by_arch_useflags'])
+
+
+class PrebuiltMapping(_PrebuiltMapping):
+  """A tuple of dicts describing our Chrome PFQs.
+
+  Members:
+    by_compat_id: A dict mapping CompatIds to sets of BoardKey objects.
+    by_arch_useflags: A dict mapping (arch, useflags) tuples to sets of
+      BoardKey objects.
+  """
+
+  # The location in a ChromeOS checkout where we should store our JSON dump.
+  INTERNAL_MAP_LOCATION = ('%s/src/private-overlays/chromeos-partner-overlay/'
+                           'chromeos/binhost/%s.json')
+
+  # The location in an external Chromium OS checkout where we should store our
+  # JSON dump.
+  EXTERNAL_MAP_LOCATION = ('%s/src/third_party/chromiumos-overlay/chromeos/'
+                           'binhost/%s.json')
+
+  @classmethod
+  def GetFilename(cls, buildroot, suffix, internal=True):
+    """Get the filename where we should store our JSON dump.
+
+    Args:
+      buildroot: The root of the source tree.
+      suffix: The base filename used for the dump (e.g. "chrome").
+      internal: If true, use the internal binhost location. Otherwise, use the
+        public one.
+    """
+    if internal:
+      return cls.INTERNAL_MAP_LOCATION % (buildroot, suffix)
+
+    return cls.EXTERNAL_MAP_LOCATION % (buildroot, suffix)
+
+  @classmethod
+  def Get(cls, keys, compat_ids):
+    """Get a mapping of the Chrome PFQ configs.
+
+    Args:
+      keys: A list of the BoardKey objects that are considered part of the
+        Chrome PFQ.
+      compat_ids: A dict mapping BoardKey objects to CompatId objects.
+
+    Returns:
+      A PrebuiltMapping object.
+    """
+    configs = cls(by_compat_id=collections.defaultdict(set),
+                  by_arch_useflags=collections.defaultdict(set))
+    for key in keys:
+      compat_id = compat_ids[key]
+      configs.by_compat_id[compat_id].add(key)
+      partial_compat_id = (compat_id.arch, compat_id.useflags)
+      configs.by_arch_useflags[partial_compat_id].add(key)
+    return configs
+
+  def Dump(self, filename, internal=True):
+    """Save a mapping of the Chrome PFQ configs to disk (JSON format).
+
+    Args:
+      filename: A location to write the Chrome PFQ configs.
+      internal: Whether the dump should include internal configurations.
+    """
+    output = []
+    for compat_id, keys in self.by_compat_id.items():
+      for key in keys:
+        # Filter internal prebuilts out of external dumps.
+        if not internal and 'chrome_internal' in key.useflags:
+          continue
+
+        output.append({'key': key.__dict__, 'compat_id': compat_id.__dict__})
+
+    with open(filename, 'w') as f:
+      json.dump(output, f, sort_keys=True, indent=2)
+
+  @classmethod
+  def Load(cls, filename):
+    """Load a mapping of the Chrome PFQ configs from disk (JSON format).
+
+    Args:
+      filename: A location to read the Chrome PFQ configs from.
+    """
+    with open(filename) as f:
+      output = json.load(f)
+
+    compat_ids = {}
+    for d in output:
+      key = BoardKey(**d['key'])
+      compat_ids[key] = CompatId(**d['compat_id'])
+
+    return cls.Get(compat_ids.keys(), compat_ids)
+
+  def GetPrebuilts(self, compat_id):
+    """Get the matching BoardKey objects associated with |compat_id|.
+
+    Args:
+      compat_id: The CompatId to use to look up prebuilts.
+    """
+    if compat_id in self.by_compat_id:
+      return self.by_compat_id[compat_id]
+
+    partial_compat_id = (compat_id.arch, compat_id.useflags)
+    if partial_compat_id in self.by_arch_useflags:
+      return self.by_arch_useflags[partial_compat_id]
+
+    return set()
+
+
+def GetChromeUseFlags(board, extra_useflags):
+  """Get a list of the use flags turned on for Chrome on a given board.
+
+  This function requires that the board has been set up first (e.g. using
+  GenConfigsForBoard)
+
+  Args:
+    board: The board to use.
+    extra_useflags: A sequence of use flags to enable or disable.
+
+  Returns:
+    A tuple of the use flags that are enabled for Chrome on the given board.
+    Use flags that are disabled are not listed.
+  """
+  assert cros_build_lib.IsInsideChroot()
+  assert os.path.exists('/build/%s' % board), 'Board %s not set up' % board
+  extra_env = {'USE': ' '.join(extra_useflags)}
+  cmd = ['equery-%s' % board, 'uses', constants.CHROME_CP]
+  chrome_useflags = cros_build_lib.RunCommand(
+      cmd, capture_output=True, print_cmd=False,
+      extra_env=extra_env).output.rstrip().split()
+  return tuple(x[1:] for x in chrome_useflags if x.startswith('+'))
+
+
+def GenConfigsForBoard(board, regen, error_code_ok):
+  """Set up the configs for the specified board.
+
+  This must be run from within the chroot. It sets up the board but does not
+  fully initialize it (it skips the initialization of the toolchain and the
+  board packages)
+
+  Args:
+    board: Board to set up.
+    regen: Whether to regen configs if the board already exists.
+    error_code_ok: Whether errors are acceptable. We set this to True in some
+      tests for configs that are not on the waterfall.
+  """
+  assert cros_build_lib.IsInsideChroot()
+  if regen or not os.path.exists('/build/%s' % board):
+    cmd = ['%s/src/scripts/setup_board' % constants.CHROOT_SOURCE_ROOT,
+           '--board=%s' % board, '--regen_configs', '--skip_toolchain_update',
+           '--skip_chroot_upgrade', '--skip_board_pkg_init', '--quiet']
+    cros_build_lib.RunCommand(cmd, error_code_ok=error_code_ok)
+
+
+_CompatId = collections.namedtuple('_CompatId', ['arch', 'useflags', 'cflags'])
+
+
+def CompatId(arch, useflags, cflags):
+  """Create a new _CompatId object.
+
+  Args:
+    arch: The architecture of this builder.
+    useflags: The full list of use flags for Chrome.
+    cflags: The full list of CFLAGS.
+  """
+  return _CompatId(arch, tuple(useflags), tuple(cflags))
+
+
+def CalculateCompatId(board, extra_useflags):
+  """Calculate the CompatId for board with the specified extra useflags.
+
+  This function requires that the board has been set up first (e.g. using
+  GenConfigsForBoard)
+
+  Args:
+    board: The board to use.
+    extra_useflags: A sequence of use flags to enable or disable.
+
+  Returns:
+    A CompatId object for the board with the specified extra_useflags.
+  """
+  assert cros_build_lib.IsInsideChroot()
+  useflags = GetChromeUseFlags(board, extra_useflags)
+  cmd = ['portageq-%s' % board, 'envvar', 'ARCH', 'CFLAGS']
+  arch_cflags = cros_build_lib.RunCommand(
+      cmd, print_cmd=False, capture_output=True).output.rstrip()
+  arch, cflags = arch_cflags.split('\n', 1)
+  cflags_split = cflags.split()
+
+  # We will add -clang-syntax to falco and nyan board. So we need to
+  # filter out -clang-syntax to make the flags from PFQ are the same as
+  # the release-board. See crbug.com/499115
+  # TODO(yunlian): Remove this when all the boards are build with -clang-syntax
+  if '-clang-syntax' in cflags_split:
+    cflags_split.remove('-clang-syntax')
+  return CompatId(arch, useflags, cflags_split)
+
+
+class CompatIdFetcher(object):
+  """Class for calculating CompatIds in parallel."""
+
+  def __init__(self, caching=False):
+    """Create a new CompatIdFetcher object.
+
+    Args:
+      caching: Whether to cache setup from run to run. See
+        PrebuiltCompatibilityTest.CACHING for details.
+    """
+    self.compat_ids = None
+    if caching:
+      # This import occurs here rather than at the top of the file because we
+      # don't want to force developers to install joblib. The caching argument
+      # is only set to True if PrebuiltCompatibilityTest.CACHING is hand-edited
+      # (for testing purposes).
+      # pylint: disable=import-error
+      from joblib import Memory
+      memory = Memory(cachedir=tempfile.gettempdir(), verbose=0)
+      self.FetchCompatIds = memory.cache(self.FetchCompatIds)
+
+  def _FetchCompatId(self, board, extra_useflags):
+    self.compat_ids[(board, extra_useflags)] = (
+        CalculateCompatId(board, extra_useflags))
+
+  def FetchCompatIds(self, board_keys):
+    """Generate a dict mapping BoardKeys to their associated CompatId.
+
+    Args:
+      board_keys: A list of BoardKey objects to fetch.
+    """
+    # pylint: disable=method-hidden
+    logging.info('Fetching CompatId objects...')
+    with parallel.Manager() as manager:
+      self.compat_ids = manager.dict()
+      parallel.RunTasksInProcessPool(self._FetchCompatId, board_keys)
+      return dict(self.compat_ids)
diff --git a/cbuildbot/binhost_test b/cbuildbot/binhost_test
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/cbuildbot/binhost_test
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/cbuildbot/binhost_test.py b/cbuildbot/binhost_test.py
new file mode 100644
index 0000000..0bab437
--- /dev/null
+++ b/cbuildbot/binhost_test.py
@@ -0,0 +1,272 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for verifying prebuilts."""
+
+from __future__ import print_function
+
+import collections
+import inspect
+import os
+import unittest
+import warnings
+
+from chromite.cbuildbot import binhost
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import constants
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import cros_test_lib
+from chromite.lib import osutils
+
+
+class PrebuiltCompatibilityTest(cros_test_lib.TestCase):
+  """Ensure that prebuilts are present for all builders and are compatible."""
+
+  # Whether to cache setup from run to run. If set, requires that you install
+  # joblib (sudo easy_install joblib). This is useful for iterating on the
+  # unit tests, but note that if you 'repo sync', you'll need to clear out
+  # /tmp/joblib and blow away /build in order to update the caches. Note that
+  # this is never normally set to True -- if you want to use this feature,
+  # you'll need to hand-edit this file.
+  # TODO(davidjames): Add a --caching option.
+  CACHING = False
+
+  # A dict mapping BoardKeys to their associated compat ids.
+  COMPAT_IDS = None
+
+  # Boards that don't have Chromium PFQs.
+  # TODO(davidjames): Empty this list.
+  BOARDS_WITHOUT_CHROMIUM_PFQS = ['rush_ryu', 'smaug']
+
+  site_config = config_lib.LoadConfigFromFile()
+
+  @classmethod
+  def setUpClass(cls):
+    assert cros_build_lib.IsInsideChroot()
+    logging.info('Generating board configs. This takes about 10m...')
+    board_keys = binhost.GetAllImportantBoardKeys(cls.site_config)
+    boards = set(key.board for key in board_keys)
+    for board in sorted(boards):
+      binhost.GenConfigsForBoard(board, regen=not cls.CACHING,
+                                 error_code_ok=False)
+    fetcher = binhost.CompatIdFetcher(caching=cls.CACHING)
+    cls.COMPAT_IDS = fetcher.FetchCompatIds(list(board_keys))
+
+  def setUp(self):
+    self.complaints = []
+    self.fatal_complaints = []
+
+  def tearDown(self):
+    if self.complaints:
+      warnings.warn('\n' + '\n'.join(self.complaints))
+    if self.fatal_complaints:
+      self.assertFalse(self.fatal_complaints, '\n'.join(self.fatal_complaints))
+
+  def Complain(self, msg, fatal):
+    """Complain about an error when the test exits.
+
+    Args:
+      msg: The message to print.
+      fatal: Whether the message should be fatal. If not, the message will be
+        considered a warning.
+    """
+    if fatal:
+      self.fatal_complaints.append(msg)
+    else:
+      self.complaints.append(msg)
+
+  def GetCompatIdDiff(self, expected, actual):
+    """Return a string describing the differences between expected and actual.
+
+    Args:
+      expected: Expected value for CompatId.
+      actual: Actual value for CompatId.
+    """
+    if expected.arch != actual.arch:
+      return 'arch differs: %s != %s' % (expected.arch, actual.arch)
+    elif expected.useflags != actual.useflags:
+      msg = self.GetSequenceDiff(expected.useflags, actual.useflags)
+      return msg.replace('Sequences', 'useflags')
+    elif expected.cflags != actual.cflags:
+      msg = self.GetSequenceDiff(expected.cflags, actual.cflags)
+      return msg.replace('Sequences', 'cflags')
+    else:
+      assert expected == actual
+      return 'no differences'
+
+  def AssertChromePrebuilts(self, pfq_configs, config, skip_useflags=False):
+    """Verify that the specified config has Chrome prebuilts.
+
+    Args:
+      pfq_configs: A PrebuiltMapping object.
+      config: The config to check.
+      skip_useflags: Don't use extra useflags from the config.
+    """
+    # Strip the config's extra useflags if requested.
+    msg_prefix = ''
+    if skip_useflags:
+      config = config.deepcopy()
+      config.useflags = []
+      msg_prefix = 'When we take out extra useflags, '
+
+    compat_id = self.GetCompatId(config)
+    pfqs = pfq_configs.by_compat_id.get(compat_id, set())
+    if not pfqs:
+      arch_useflags = (compat_id.arch, compat_id.useflags)
+      for key in pfq_configs.by_arch_useflags[arch_useflags]:
+        # If there wasn't an exact match for this CompatId, but there
+        # was an (arch, useflags) match, then we'll be using mismatched
+        # Chrome prebuilts. Complain.
+        # TODO(davidjames): This should be a fatal error for important
+        # builders, but we need to clean up existing cases first.
+        pfq_compat_id = self.COMPAT_IDS[key]
+        err = self.GetCompatIdDiff(compat_id, pfq_compat_id)
+        msg = '%s%s uses mismatched Chrome prebuilts from %s -- %s'
+        self.Complain(msg % (msg_prefix, config.name, key.board, err),
+                      fatal=False)
+        pfqs.add(key)
+
+    if not pfqs:
+      pre_cq = (config.build_type == config_lib.CONFIG_TYPE_PRECQ)
+      msg = '%s%s cannot find Chrome prebuilts -- %s'
+      self.Complain(msg % (msg_prefix, config.name, compat_id),
+                    fatal=pre_cq or config.important)
+
+  def GetCompatId(self, config, board=None):
+    """Get the CompatId for a config.
+
+    Args:
+      config: A config_lib.BuildConfig object.
+      board: Board to use. Defaults to the first board in the config.
+          Optional if len(config.boards) == 1.
+    """
+    if board is None:
+      assert len(config.boards) == 1
+      board = config.boards[0]
+    else:
+      assert board in config.boards
+
+    board_key = binhost.GetBoardKey(config, board)
+    compat_id = self.COMPAT_IDS.get(board_key)
+    if compat_id is None:
+      compat_id = binhost.CalculateCompatId(board, config.useflags)
+      self.COMPAT_IDS[board_key] = compat_id
+    return compat_id
+
+  def testChromePrebuiltsPresent(self, filename=None):
+    """Verify Chrome prebuilts exist for all configs that build Chrome.
+
+    Args:
+      filename: Filename to load our PFQ mappings from. By default, generate
+        the PFQ mappings based on the current config.
+    """
+    if filename is not None:
+      pfq_configs = binhost.PrebuiltMapping.Load(filename)
+    else:
+      keys = binhost.GetChromePrebuiltConfigs(self.site_config).keys()
+      pfq_configs = binhost.PrebuiltMapping.Get(keys, self.COMPAT_IDS)
+
+    for compat_id, pfqs in pfq_configs.by_compat_id.items():
+      if len(pfqs) > 1:
+        msg = 'The following Chrome PFQs produce identical prebuilts: %s -- %s'
+        self.Complain(msg % (', '.join(str(x) for x in pfqs), compat_id),
+                      fatal=False)
+
+    for _name, config in sorted(self.site_config.items()):
+      # Skip over configs that don't have Chrome or have >1 board.
+      if config.sync_chrome is False or len(config.boards) != 1:
+        continue
+
+      # Look for boards with missing prebuilts.
+      pre_cq = (config.build_type == config_lib.CONFIG_TYPE_PRECQ)
+      if ((config.usepkg_build_packages and not config.chrome_rev) and
+          (config.active_waterfall or pre_cq)):
+        self.AssertChromePrebuilts(pfq_configs, config)
+
+        # Check that we have a builder for the version w/o custom useflags as
+        # well.
+        if (config.useflags and
+            config.boards[0] not in self.BOARDS_WITHOUT_CHROMIUM_PFQS):
+          self.AssertChromePrebuilts(pfq_configs, config, skip_useflags=True)
+
+  def testCurrentChromePrebuiltsEnough(self):
+    """Verify Chrome prebuilts exist for all configs that build Chrome.
+
+    This loads the list of Chrome prebuilts that were generated during the last
+    Chrome PFQ run from disk and verifies that it is sufficient.
+    """
+    filename = binhost.PrebuiltMapping.GetFilename(constants.SOURCE_ROOT,
+                                                   'chrome')
+    if os.path.exists(filename):
+      self.testChromePrebuiltsPresent(filename)
+
+  def testReleaseGroupSharing(self):
+    """Verify that the boards built in release groups have compatible settings.
+
+    This means that all of the subconfigs in the release group have matching
+    use flags, cflags, and architecture.
+    """
+    for config in self.site_config.values():
+      # Only test release groups.
+      if not config.name.endswith('-release-group'):
+        continue
+
+      # Get a list of the compatibility IDs.
+      compat_ids_for_config = collections.defaultdict(set)
+      for subconfig in config.child_configs:
+        if subconfig.sync_chrome is not False:
+          for board in subconfig.boards:
+            compat_id = self.GetCompatId(subconfig, board)
+            compat_ids_for_config[compat_id].add(board)
+
+      if len(compat_ids_for_config) > 1:
+        arch_useflags = set(tuple(x[:-1]) for x in compat_ids_for_config)
+        if len(arch_useflags) > 1:
+          # If two configs in the same group have mismatched Chrome binaries
+          # (e.g. different use flags), Chrome may be built twice in parallel
+          # and this may result in flaky, slow, and possibly incorrect builds.
+          msg = '%s: %s and %s have mismatched Chrome binaries -- %s'
+          fatal = True
+        else:
+          # TODO(davidjames): This should be marked fatal once the
+          # ivybridge-freon-release-group is cleaned up.
+          msg = '%s: %s and %s have mismatched cflags -- %s'
+          fatal = False
+        ids, board_sets = zip(*compat_ids_for_config.iteritems())
+        boards = [next(iter(x)) for x in board_sets]
+        err = self.GetCompatIdDiff(ids[0], ids[1])
+        msg %= (config.name, boards[0], boards[1], err)
+        self.Complain(msg, fatal=fatal)
+
+  def testDumping(self):
+    """Verify PrebuiltMapping objects survive a dump/load round trip.
+
+    Dumps the current Chrome PFQ prebuilt mappings to a JSON file and
+    checks that loading it back yields an equal PrebuiltMapping object.
+    """
+    with osutils.TempDir() as tempdir:
+      keys = binhost.GetChromePrebuiltConfigs(self.site_config).keys()
+      pfq_configs = binhost.PrebuiltMapping.Get(keys, self.COMPAT_IDS)
+      filename = os.path.join(tempdir, 'foo.json')
+      pfq_configs.Dump(filename)
+      self.assertEqual(pfq_configs, binhost.PrebuiltMapping.Load(filename))
+
+
+def NoIncremental():
+  """Creates a suite containing only non-incremental tests.
+
+  This suite should be used on the Chrome PFQ as we don't need to preserve
+  incremental compatibility of prebuilts.
+
+  Returns:
+    A unittest.TestSuite that does not contain any incremental tests.
+  """
+  suite = unittest.TestSuite()
+  method_names = [f[0] for f in inspect.getmembers(PrebuiltCompatibilityTest,
+                                                   predicate=inspect.ismethod)]
+  for m in method_names:
+    if m.startswith('test') and m != 'testCurrentChromePrebuiltsEnough':
+      suite.addTest(PrebuiltCompatibilityTest(m))
+  return suite
diff --git a/cbuildbot/builders/__init__.py b/cbuildbot/builders/__init__.py
new file mode 100644
index 0000000..33a4309
--- /dev/null
+++ b/cbuildbot/builders/__init__.py
@@ -0,0 +1,90 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module for instantiating builders.
+
+Typically builder classes/objects are obtained indirectly via the helpers in
+this module.  This is because the config_lib settings can't import this
+module (and children directly): it might lead to circular references, and it
+would add a lot of overhead to that module.  Generally only the main cbuildbot
+module needs to care about the builder classes.
+
+If you're looking for a specific builder implementation, then check out the
+*_builders.py modules that are in this same directory.  The config_lib
+has a builder_class_name member that controls the type of builder that is used
+for each config.  e.g. builder_class_name='Simple' would look for the class
+whose name is 'SimpleBuilder' in all the *_builders.py modules.
+"""
+
+from __future__ import print_function
+
+import glob
+import os
+
+from chromite.lib import cros_import
+
+
+def GetBuilderClass(name):
+  """Locate the builder class with |name|.
+
+  Examples:
+    If you want to create a new SimpleBuilder, you'd do:
+    cls = builders.GetBuilderClass('simple_builders.SimpleBuilder')
+    builder = cls(...)
+
+    If you want a site specific builder class, do:
+    cls = builders.GetBuilderClass('config.my_builders.MyBuilder')
+    builder = cls(...)
+
+  Args:
+    name: The base name of the builder class.
+
+  Returns:
+    The class used to instantiate this type of builder.
+
+  Raises:
+    AttributeError when |name| could not be found.
+  """
+  if '.' not in name:
+    raise ValueError('name should be "<module>.<builder>" not "%s"' % name)
+
+  name_parts = name.split('.')
+
+  # Last part is the class name.
+  builder_class_name = name_parts.pop()
+
+  if name_parts[0] == 'config':
+    # config means pull from the site specific config.
+    # config.my_builders -> chromite.config.my_builders
+    name_parts = ['chromite'] + name_parts
+  else:
+    # Otherwise pull from chromite.
+    # simple_builders -> chromite.cbuildbot.builders.simple_builders
+    name_parts = ['chromite', 'cbuildbot', 'builders'] + name_parts
+
+  target = '.'.join(name_parts)
+  module = cros_import.ImportModule(target)
+
+  # See if this module has the builder we care about.
+  if hasattr(module, builder_class_name):
+    return getattr(module, builder_class_name)
+
+  raise AttributeError('could not locate %s builder' % builder_class_name)
+
+
+def Builder(builder_run):
+  """Given a |builder_run| runtime, return an instantiated builder
+
+  This is a helper wrapper that resolves the builder_class_name field in the
+  builder settings (which was declared in the build config) to the actual class
+  found in the builder modules.
+
+  Args:
+    builder_run: A cbuildbot_run.BuilderRun object.
+
+  Returns:
+    An object of type generic_builders.Builder.
+  """
+  cls = GetBuilderClass(builder_run.config.builder_class_name)
+  return cls(builder_run)
diff --git a/cbuildbot/builders/builders_unittest b/cbuildbot/builders/builders_unittest
new file mode 120000
index 0000000..ef3e37b
--- /dev/null
+++ b/cbuildbot/builders/builders_unittest
@@ -0,0 +1 @@
+../../scripts/wrapper.py
\ No newline at end of file
diff --git a/cbuildbot/builders/builders_unittest.py b/cbuildbot/builders/builders_unittest.py
new file mode 100644
index 0000000..0843651
--- /dev/null
+++ b/cbuildbot/builders/builders_unittest.py
@@ -0,0 +1,56 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for main builder logic (__init__.py)."""
+
+from __future__ import print_function
+
+import mock
+
+from chromite.cbuildbot import builders
+from chromite.cbuildbot.builders import simple_builders
+from chromite.lib import cros_import
+from chromite.lib import cros_test_lib
+
+
+class ModuleTest(cros_test_lib.MockTempDirTestCase):
+  """Module loading related tests"""
+
+  def testGetBuilderClass(self):
+    """Check behavior when requesting a valid builder."""
+    result = builders.GetBuilderClass('simple_builders.SimpleBuilder')
+    self.assertEqual(result, simple_builders.SimpleBuilder)
+
+  def testGetBuilderClassError(self):
+    """Check behavior when requesting missing builders."""
+    self.assertRaises(ValueError, builders.GetBuilderClass, 'Foalksdjo')
+    self.assertRaises(ImportError, builders.GetBuilderClass, 'foo.Foalksdjo')
+    self.assertRaises(AttributeError, builders.GetBuilderClass,
+                      'misc_builders.Foalksdjo')
+
+  def testGetBuilderClassConfig(self):
+    """Check behavior when requesting config builders.
+
+    This can't be done with live classes since the site config may or may not
+    be there.
+    """
+    # Setup
+    mock_module = mock.Mock()
+    mock_module.MyBuilder = 'fake_class'
+    mock_import = self.PatchObject(cros_import, 'ImportModule',
+                                   return_value=mock_module)
+    # Test
+    result = builders.GetBuilderClass('config.my_builders.MyBuilder')
+    # Verify
+    mock_import.assert_called_once_with('chromite.config.my_builders')
+    self.assertEqual(result, 'fake_class')
+
+    # Test again with a nested builder class name.
+    mock_import.reset_mock()
+
+    # Test
+    result = builders.GetBuilderClass('config.nested.my_builders.MyBuilder')
+    # Verify
+    mock_import.assert_called_once_with('chromite.config.nested.my_builders')
+    self.assertEqual(result, 'fake_class')
diff --git a/cbuildbot/builders/generic_builders.py b/cbuildbot/builders/generic_builders.py
new file mode 100644
index 0000000..5614774
--- /dev/null
+++ b/cbuildbot/builders/generic_builders.py
@@ -0,0 +1,341 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing the generic builders."""
+
+from __future__ import print_function
+
+import multiprocessing
+import os
+import sys
+import tempfile
+import traceback
+
+from chromite.cbuildbot import constants
+from chromite.cbuildbot import failures_lib
+from chromite.cbuildbot import results_lib
+from chromite.cbuildbot import trybot_patch_pool
+from chromite.cbuildbot.stages import build_stages
+from chromite.cbuildbot.stages import report_stages
+from chromite.cbuildbot.stages import sync_stages
+from chromite.lib import cidb
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import git
+from chromite.lib import parallel
+
+
+class Builder(object):
+  """Parent class for all builder types.
+
+  This class functions as an abstract parent class for various build types.
+  Its intended use is builder_instance.Run().
+
+  Attributes:
+    _run: The BuilderRun object for this run.
+    archive_stages: Dict of BuildConfig keys to ArchiveStage values.
+    patch_pool: TrybotPatchPool.
+  """
+
+  def __init__(self, builder_run):
+    """Initializes instance variables. Must be called by all subclasses."""
+    self._run = builder_run
+
+    # TODO: all the fields below should not be part of the generic builder.
+    # We need to restructure our SimpleBuilder and see about creating a new
+    # base in there for holding them.
+    if self._run.config.chromeos_official:
+      os.environ['CHROMEOS_OFFICIAL'] = '1'
+
+    self.archive_stages = {}
+    self.patch_pool = trybot_patch_pool.TrybotPatchPool()
+    self._build_image_lock = multiprocessing.Lock()
+
+  def Initialize(self):
+    """Runs through the initialization steps of an actual build."""
+    if self._run.options.resume:
+      results_lib.LoadCheckpoint(self._run.buildroot)
+
+    self._RunStage(report_stages.BuildStartStage)
+
+    self._RunStage(build_stages.CleanUpStage)
+
+  def _GetStageInstance(self, stage, *args, **kwargs):
+    """Helper function to get a stage instance given the args.
+
+    Useful as almost all stages just take in builder_run.
+    """
+    # Normally the default BuilderRun (self._run) is used, but it can
+    # be overridden with "builder_run" kwargs (e.g. for child configs).
+    builder_run = kwargs.pop('builder_run', self._run)
+    return stage(builder_run, *args, **kwargs)
+
+  def _SetReleaseTag(self):
+    """Sets run.attrs.release_tag from the manifest manager used in sync.
+
+    Must be run after sync stage as syncing enables us to have a release tag,
+    and must be run before any usage of attrs.release_tag.
+
+    TODO(mtennant): Find a bottleneck place in syncing that can set this
+    directly.  Be careful, as there are several kinds of syncing stages, and
+    sync stages have been known to abort with sys.exit calls.
+    """
+    manifest_manager = getattr(self._run.attrs, 'manifest_manager', None)
+    if manifest_manager:
+      self._run.attrs.release_tag = manifest_manager.current_version
+    else:
+      self._run.attrs.release_tag = None
+
+    logging.debug('Saved release_tag value for run: %r',
+                  self._run.attrs.release_tag)
+
+  def _RunStage(self, stage, *args, **kwargs):
+    """Wrapper to run a stage.
+
+    Args:
+      stage: A BuilderStage class.
+      args: args to pass to stage constructor.
+      kwargs: kwargs to pass to stage constructor.
+
+    Returns:
+      Whatever the stage's Run method returns.
+    """
+    stage_instance = self._GetStageInstance(stage, *args, **kwargs)
+    return stage_instance.Run()
+
+  @staticmethod
+  def _RunParallelStages(stage_objs):
+    """Run the specified stages in parallel.
+
+    Args:
+      stage_objs: BuilderStage objects.
+    """
+    steps = [stage.Run for stage in stage_objs]
+    try:
+      parallel.RunParallelSteps(steps)
+
+    except BaseException as ex:
+      # If a stage threw an exception, it might not have correctly reported
+      # results (e.g. because it was killed before it could report the
+      # results.) In this case, attribute the exception to any stages that
+      # didn't report back correctly (if any).
+      for stage in stage_objs:
+        for name in stage.GetStageNames():
+          if not results_lib.Results.StageHasResults(name):
+            results_lib.Results.Record(name, ex, str(ex))
+
+      if cidb.CIDBConnectionFactory.IsCIDBSetup():
+        db = cidb.CIDBConnectionFactory.GetCIDBConnectionForBuilder()
+        if db:
+          for stage in stage_objs:
+            for build_stage_id in stage.GetBuildStageIDs():
+              if not db.HasBuildStageFailed(build_stage_id):
+                failures_lib.ReportStageFailureToCIDB(db,
+                                                      build_stage_id,
+                                                      ex)
+
+      raise
+
+  def _RunSyncStage(self, sync_instance):
+    """Run given |sync_instance| stage and be sure attrs.release_tag set."""
+    try:
+      sync_instance.Run()
+    finally:
+      self._SetReleaseTag()
+
+  def SetVersionInfo(self):
+    """Sync the builder's version info with the buildbot runtime."""
+    self._run.attrs.version_info = self.GetVersionInfo()
+
+  def GetVersionInfo(self):
+    """Returns a manifest_version.VersionInfo object for this build.
+
+    Subclasses must override this method.
+    """
+    raise NotImplementedError()
+
+  def GetSyncInstance(self):
+    """Returns an instance of a SyncStage that should be run.
+
+    Subclasses must override this method.
+    """
+    raise NotImplementedError()
+
+  def GetCompletionInstance(self):
+    """Returns the MasterSlaveSyncCompletionStage for this build.
+
+    Subclasses may override this method.
+
+    Returns:
+      None
+    """
+    return None
+
+  def RunStages(self):
+    """Subclasses must override this method.  Runs the appropriate code."""
+    raise NotImplementedError()
+
+  def _ReExecuteInBuildroot(self, sync_instance):
+    """Reexecutes self in buildroot and returns True if build succeeds.
+
+    This allows the buildbot code to test itself when changes are patched for
+    buildbot-related code.  This is a no-op if the buildroot == buildroot
+    of the running chromite checkout.
+
+    Args:
+      sync_instance: Instance of the sync stage that was run to sync.
+
+    Returns:
+      True if the Build succeeded.
+    """
+    if not self._run.options.resume:
+      results_lib.WriteCheckpoint(self._run.options.buildroot)
+
+    args = sync_stages.BootstrapStage.FilterArgsForTargetCbuildbot(
+        self._run.options.buildroot, constants.PATH_TO_CBUILDBOT,
+        self._run.options)
+
+    # Specify a buildroot explicitly (just in case, for local trybot).
+    # Suppress any timeout options given from the commandline in the
+    # invoked cbuildbot; our timeout will enforce it instead.
+    args += ['--resume', '--timeout', '0', '--notee', '--nocgroups',
+             '--buildroot', os.path.abspath(self._run.options.buildroot)]
+
+    # Set --version. Note that --version isn't legal without --buildbot.
+    if (self._run.options.buildbot and
+        hasattr(self._run.attrs, 'manifest_manager')):
+      ver = self._run.attrs.manifest_manager.current_version
+      args += ['--version', ver]
+
+    pool = getattr(sync_instance, 'pool', None)
+    if pool:
+      filename = os.path.join(self._run.options.buildroot,
+                              'validation_pool.dump')
+      pool.Save(filename)
+      args += ['--validation_pool', filename]
+
+    # Reset the cache dir so that the child will calculate it automatically.
+    if not self._run.options.cache_dir_specified:
+      commandline.BaseParser.ConfigureCacheDir(None)
+
+    with tempfile.NamedTemporaryFile(prefix='metadata') as metadata_file:
+      metadata_file.write(self._run.attrs.metadata.GetJSON())
+      metadata_file.flush()
+      args += ['--metadata_dump', metadata_file.name]
+
+      # Re-run the command in the buildroot.
+      # Finally, be generous and give the invoked cbuildbot 30s to shutdown
+      # when something occurs.  It should exit quicker, but the sigterm may
+      # hit while the system is particularly busy.
+      return_obj = cros_build_lib.RunCommand(
+          args, cwd=self._run.options.buildroot, error_code_ok=True,
+          kill_timeout=30)
+      return return_obj.returncode == 0
+
+  def _InitializeTrybotPatchPool(self):
+    """Generate patch pool from patches specified on the command line.
+
+    Do this only if we need to patch changes later on.
+    """
+    changes_stage = sync_stages.PatchChangesStage.StageNamePrefix()
+    check_func = results_lib.Results.PreviouslyCompletedRecord
+    if not check_func(changes_stage) or self._run.options.bootstrap:
+      options = self._run.options
+      self.patch_pool = trybot_patch_pool.TrybotPatchPool.FromOptions(
+          gerrit_patches=options.gerrit_patches,
+          local_patches=options.local_patches,
+          sourceroot=options.sourceroot,
+          remote_patches=options.remote_patches)
+
+  def _GetBootstrapStage(self):
+    """Constructs and returns the BootStrapStage object.
+
+    We return None when there are no chromite patches to test, and
+    --test-bootstrap wasn't passed in.
+    """
+    stage = None
+
+    patches_needed = sync_stages.BootstrapStage.BootstrapPatchesNeeded(
+        self._run, self.patch_pool)
+
+    chromite_branch = git.GetChromiteTrackingBranch()
+
+    if (patches_needed or
+        self._run.options.test_bootstrap or
+        chromite_branch != self._run.options.branch):
+      stage = sync_stages.BootstrapStage(self._run, self.patch_pool)
+    return stage
+
+  def Run(self):
+    """Main runner for this builder class.  Runs build and prints summary.
+
+    Returns:
+      Whether the build succeeded.
+    """
+    self._InitializeTrybotPatchPool()
+
+    if self._run.options.bootstrap:
+      bootstrap_stage = self._GetBootstrapStage()
+      if bootstrap_stage:
+        # BootstrapStage blocks on re-execution of cbuildbot.
+        bootstrap_stage.Run()
+        return bootstrap_stage.returncode == 0
+
+    print_report = True
+    exception_thrown = False
+    success = True
+    sync_instance = None
+    try:
+      self.Initialize()
+      sync_instance = self.GetSyncInstance()
+      self._RunSyncStage(sync_instance)
+
+      if self._run.ShouldPatchAfterSync():
+        # Filter out patches to manifest, since PatchChangesStage can't handle
+        # them.  Manifest patches are patched in the BootstrapStage.
+        non_manifest_patches = self.patch_pool.FilterManifest(negate=True)
+        if non_manifest_patches:
+          self._RunStage(sync_stages.PatchChangesStage, non_manifest_patches)
+
+      # Now that we have a fully synced & patched tree, we can let the builder
+      # extract version information from the sources for this particular build.
+      self.SetVersionInfo()
+      if self._run.ShouldReexecAfterSync():
+        print_report = False
+        success = self._ReExecuteInBuildroot(sync_instance)
+      else:
+        self._RunStage(report_stages.BuildReexecutionFinishedStage)
+        self.RunStages()
+
+    except Exception as ex:
+      exception_thrown = True
+      if results_lib.Results.BuildSucceededSoFar():
+        # If the build is marked as successful, but threw exceptions, that's a
+        # problem. Print the traceback for debugging.
+        if isinstance(ex, failures_lib.CompoundFailure):
+          print(str(ex))
+
+        traceback.print_exc(file=sys.stdout)
+        raise
+
+      if not (print_report and isinstance(ex, failures_lib.StepFailure)):
+        # If the failed build threw a non-StepFailure exception, we
+        # should raise it.
+        raise
+
+    finally:
+      if print_report:
+        results_lib.WriteCheckpoint(self._run.options.buildroot)
+        completion_instance = self.GetCompletionInstance()
+        self._RunStage(report_stages.ReportStage, completion_instance)
+        success = results_lib.Results.BuildSucceededSoFar()
+        if exception_thrown and success:
+          success = False
+          logging.PrintBuildbotStepWarnings()
+          print("""\
+Exception thrown, but all stages marked successful. This is an internal error,
+because the stage that threw the exception should be marked as failing.""")
+
+    return success
diff --git a/cbuildbot/builders/misc_builders.py b/cbuildbot/builders/misc_builders.py
new file mode 100644
index 0000000..7481d68
--- /dev/null
+++ b/cbuildbot/builders/misc_builders.py
@@ -0,0 +1,21 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing various one-off builders."""
+
+from __future__ import print_function
+
+from chromite.cbuildbot.builders import simple_builders
+from chromite.cbuildbot.stages import build_stages
+from chromite.cbuildbot.stages import report_stages
+
+
+class RefreshPackagesBuilder(simple_builders.SimpleBuilder):
+  """Run the refresh packages status update."""
+
+  def RunStages(self):
+    """Runs through build process."""
+    self._RunStage(build_stages.InitSDKStage)
+    self.RunSetupBoard()
+    self._RunStage(report_stages.RefreshPackageStatusStage)
diff --git a/cbuildbot/builders/release_builders.py b/cbuildbot/builders/release_builders.py
new file mode 100644
index 0000000..4b106e5
--- /dev/null
+++ b/cbuildbot/builders/release_builders.py
@@ -0,0 +1,35 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing release engineering related builders."""
+
+from __future__ import print_function
+
+from chromite.lib import parallel
+
+from chromite.cbuildbot.builders import simple_builders
+from chromite.cbuildbot.stages import branch_stages
+from chromite.cbuildbot.stages import release_stages
+
+
+class CreateBranchBuilder(simple_builders.SimpleBuilder):
+  """Create release branches in the manifest."""
+
+  def RunStages(self):
+    """Runs through build process."""
+    self._RunStage(branch_stages.BranchUtilStage)
+
+
+class GeneratePayloadsBuilder(simple_builders.SimpleBuilder):
+  """Run the PaygenStage once for each board."""
+
+  def RunStages(self):
+    """Runs through build process."""
+    def _RunStageWrapper(board):
+      self._RunStage(release_stages.PaygenStage, board=board,
+                     channels=self._run.options.channels, archive_stage=None)
+
+    with parallel.BackgroundTaskRunner(_RunStageWrapper) as queue:
+      for board in self._run.config.boards:
+        queue.put([board])
diff --git a/cbuildbot/builders/sdk_builders.py b/cbuildbot/builders/sdk_builders.py
new file mode 100644
index 0000000..2771275
--- /dev/null
+++ b/cbuildbot/builders/sdk_builders.py
@@ -0,0 +1,37 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing SDK builders."""
+
+from __future__ import print_function
+
+import datetime
+
+from chromite.cbuildbot import constants
+from chromite.cbuildbot.builders import simple_builders
+from chromite.cbuildbot.stages import artifact_stages
+from chromite.cbuildbot.stages import build_stages
+from chromite.cbuildbot.stages import chrome_stages
+from chromite.cbuildbot.stages import sdk_stages
+
+
+class ChrootSdkBuilder(simple_builders.SimpleBuilder):
+  """Build the SDK chroot."""
+
+  def RunStages(self):
+    """Runs through build process."""
+    # Unlike normal CrOS builds, the SDK has no concept of pinned CrOS manifest
+    # or specific Chrome version.  Use a datestamp instead.
+    version = datetime.datetime.now().strftime('%Y.%m.%d.%H%M%S')
+    # boards=[]: uprev runs with no board list for this builder.
+    self._RunStage(build_stages.UprevStage, boards=[])
+    self._RunStage(build_stages.InitSDKStage)
+    self._RunStage(build_stages.SetupBoardStage, constants.CHROOT_BUILDER_BOARD)
+    self._RunStage(chrome_stages.SyncChromeStage)
+    self._RunStage(chrome_stages.PatchChromeStage)
+    self._RunStage(sdk_stages.SDKBuildToolchainsStage)
+    # Package and test the SDK under the datestamp version before uploading
+    # prebuilts for the chroot builder board.
+    self._RunStage(sdk_stages.SDKPackageStage, version=version)
+    self._RunStage(sdk_stages.SDKPackageToolchainOverlaysStage, version=version)
+    self._RunStage(sdk_stages.SDKTestStage)
+    self._RunStage(artifact_stages.UploadPrebuiltsStage,
+                   constants.CHROOT_BUILDER_BOARD, version=version)
diff --git a/cbuildbot/builders/simple_builders.py b/cbuildbot/builders/simple_builders.py
new file mode 100644
index 0000000..ce6ce19
--- /dev/null
+++ b/cbuildbot/builders/simple_builders.py
@@ -0,0 +1,414 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing the simple builders."""
+
+from __future__ import print_function
+
+import collections
+
+from chromite.cbuildbot import afdo
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import constants
+from chromite.cbuildbot import manifest_version
+from chromite.cbuildbot import results_lib
+from chromite.cbuildbot.builders import generic_builders
+from chromite.cbuildbot.stages import afdo_stages
+from chromite.cbuildbot.stages import artifact_stages
+from chromite.cbuildbot.stages import build_stages
+from chromite.cbuildbot.stages import chrome_stages
+from chromite.cbuildbot.stages import completion_stages
+from chromite.cbuildbot.stages import generic_stages
+from chromite.cbuildbot.stages import release_stages
+from chromite.cbuildbot.stages import report_stages
+from chromite.cbuildbot.stages import sync_stages
+from chromite.cbuildbot.stages import test_stages
+from chromite.lib import cros_logging as logging
+from chromite.lib import patch as cros_patch
+from chromite.lib import parallel
+
+
+# TODO: SimpleBuilder needs to be broken up big time.
+
+
+BoardConfig = collections.namedtuple('BoardConfig', ['board', 'name'])
+
+
+class SimpleBuilder(generic_builders.Builder):
+  """Builder that performs basic vetting operations."""
+
+  def GetSyncInstance(self):
+    """Sync to lkgm or TOT as necessary.
+
+    Returns:
+      The instance of the sync stage to run.
+    """
+    # Precedence: explicit --version pin, then LKGM, then Chrome LKGM, and
+    # finally a plain TOT sync.
+    if self._run.options.force_version:
+      sync_stage = self._GetStageInstance(
+          sync_stages.ManifestVersionedSyncStage)
+    elif self._run.config.use_lkgm:
+      sync_stage = self._GetStageInstance(sync_stages.LKGMSyncStage)
+    elif self._run.config.use_chrome_lkgm:
+      sync_stage = self._GetStageInstance(chrome_stages.ChromeLKGMSyncStage)
+    else:
+      sync_stage = self._GetStageInstance(sync_stages.SyncStage)
+
+    return sync_stage
+
+  def GetVersionInfo(self):
+    """Returns the CrOS version info from the chromiumos-overlay."""
+    return manifest_version.VersionInfo.from_repo(self._run.buildroot)
+
+  def _GetChangesUnderTest(self):
+    """Returns the list of GerritPatch changes under test."""
+    # Use a set to de-duplicate changes that appear both in build metadata
+    # and in the local patch pool.
+    changes = set()
+
+    changes_json_list = self._run.attrs.metadata.GetDict().get('changes', [])
+    for change_dict in changes_json_list:
+      change = cros_patch.GerritFetchOnlyPatch.FromAttrDict(change_dict)
+      changes.add(change)
+
+    # Also add the changes from PatchChangeStage, the PatchChangeStage doesn't
+    # write changes into metadata.
+    if self._run.ShouldPatchAfterSync():
+      changes.update(set(self.patch_pool.gerrit_patches))
+
+    return list(changes)
+
+  def _RunHWTests(self, builder_run, board):
+    """Run hwtest-related stages for the specified board.
+
+    Args:
+      builder_run: BuilderRun object for these background stages.
+      board: Board name.
+    """
+    parallel_stages = []
+
+    # We can not run hw tests without archiving the payloads.
+    if builder_run.options.archive:
+      for suite_config in builder_run.config.hw_tests:
+        stage_class = None
+        # NOTE(review): 'async' became a reserved keyword in Python 3.7; this
+        # attribute access only parses under Python 2.
+        if suite_config.async:
+          stage_class = test_stages.ASyncHWTestStage
+        elif suite_config.suite == constants.HWTEST_AU_SUITE:
+          stage_class = test_stages.AUTestStage
+        else:
+          stage_class = test_stages.HWTestStage
+        # Blocking suites run serially (in config order); the rest are queued
+        # up to run in parallel afterwards.
+        if suite_config.blocking:
+          self._RunStage(stage_class, board, suite_config,
+                         builder_run=builder_run)
+        else:
+          new_stage = self._GetStageInstance(stage_class, board,
+                                             suite_config,
+                                             builder_run=builder_run)
+          parallel_stages.append(new_stage)
+
+    # Runs (possibly empty) non-blocking suites in parallel.
+    self._RunParallelStages(parallel_stages)
+
+  def _RunBackgroundStagesForBoardAndMarkAsSuccessful(self, builder_run, board):
+    """Run background board-specific stages for the specified board.
+
+    After finishing the build, mark it as successful.
+
+    Args:
+      builder_run: BuilderRun object for these background stages.
+      board: Board name.
+    """
+    self._RunBackgroundStagesForBoard(builder_run, board)
+    # Record success via a parallel-safe board run attribute; only reached if
+    # the stages above did not raise.
+    board_runattrs = builder_run.GetBoardRunAttrs(board)
+    board_runattrs.SetParallel('success', True)
+
+  def _RunBackgroundStagesForBoard(self, builder_run, board):
+    """Run background board-specific stages for the specified board.
+
+    Used by _RunBackgroundStagesForBoardAndMarkAsSuccessful. Callers should use
+    that method instead.
+
+    Args:
+      builder_run: BuilderRun object for these background stages.
+      board: Board name.
+    """
+    config = builder_run.config
+
+    # TODO(mtennant): This is the last usage of self.archive_stages.  We can
+    # kill it once we migrate its uses to BuilderRun so that none of the
+    # stages below need it as an argument.
+    archive_stage = self.archive_stages[BoardConfig(board, config.name)]
+    if config.afdo_generate_min:
+      # afdo_generate_min builds only archive; everything else was handled
+      # in the foreground by _RunDefaultTypeBuild.
+      self._RunParallelStages([archive_stage])
+      return
+
+    # paygen can't complete without push_image.
+    assert not config.paygen or config.push_image
+
+    if config.build_packages_in_background:
+      self._RunStage(build_stages.BuildPackagesStage, board,
+                     update_metadata=True, builder_run=builder_run,
+                     afdo_use=config.afdo_use)
+
+    if builder_run.config.compilecheck or builder_run.options.compilecheck:
+      # Compile-check builds stop after unit tests; no image or artifacts.
+      self._RunStage(test_stages.UnitTestStage, board,
+                     builder_run=builder_run)
+      return
+
+    # Build the image first before doing anything else.
+    # TODO(davidjames): Remove this lock once http://crbug.com/352994 is fixed.
+    with self._build_image_lock:
+      self._RunStage(build_stages.BuildImageStage, board,
+                     builder_run=builder_run, afdo_use=config.afdo_use)
+
+    # While this stage list is run in parallel, the order here dictates the
+    # order that things will be shown in the log.  So group things together
+    # that make sense when read in order.  Also keep in mind that, since we
+    # gather output manually, early slow stages will prevent any output from
+    # later stages showing up until it finishes.
+
+    # Determine whether to run the DetectIrrelevantChangesStage
+    stage_list = []
+    changes = self._GetChangesUnderTest()
+    if changes:
+      stage_list += [[report_stages.DetectIrrelevantChangesStage, board,
+                      changes]]
+    stage_list += [[chrome_stages.ChromeSDKStage, board]]
+
+    if config.vm_test_runs > 1:
+      # Run the VMTests multiple times to see if they fail.
+      stage_list += [
+          [generic_stages.RepeatStage, config.vm_test_runs,
+           test_stages.VMTestStage, board]]
+    else:
+      # Give the VMTests one retry attempt in case failures are flaky.
+      stage_list += [[generic_stages.RetryStage, 1, test_stages.VMTestStage,
+                      board]]
+
+    if config.afdo_generate:
+      stage_list += [[afdo_stages.AFDODataGenerateStage, board]]
+
+    stage_list += [
+        [release_stages.SignerTestStage, board, archive_stage],
+        [release_stages.PaygenStage, board, archive_stage],
+        [test_stages.ImageTestStage, board],
+        [test_stages.UnitTestStage, board],
+        [artifact_stages.UploadPrebuiltsStage, board],
+        [artifact_stages.DevInstallerPrebuiltsStage, board],
+        [artifact_stages.DebugSymbolsStage, board],
+        [artifact_stages.CPEExportStage, board],
+        [artifact_stages.UploadTestArtifactsStage, board],
+    ]
+
+    stage_objs = [self._GetStageInstance(*x, builder_run=builder_run)
+                  for x in stage_list]
+
+    # HW tests run concurrently with the stage list + archive stage.
+    parallel.RunParallelSteps([
+        lambda: self._RunParallelStages(stage_objs + [archive_stage]),
+        lambda: self._RunHWTests(builder_run, board),
+    ])
+
+  def RunSetupBoard(self):
+    """Run the SetupBoard stage for all child configs and boards."""
+    for builder_run in self._run.GetUngroupedBuilderRuns():
+      for board in builder_run.config.boards:
+        self._RunStage(build_stages.SetupBoardStage, board,
+                       builder_run=builder_run)
+
+  def _RunMasterPaladinOrChromePFQBuild(self):
+    """Runs through the stages of the paladin or chrome PFQ master build."""
+    self._RunStage(build_stages.UprevStage)
+    self._RunStage(build_stages.InitSDKStage)
+    # The CQ/Chrome PFQ master will not actually run the SyncChrome stage, but
+    # we want the logic that gets triggered when SyncChrome stage is skipped.
+    self._RunStage(chrome_stages.SyncChromeStage)
+    if self._run.config.build_type == constants.PALADIN_TYPE:
+      self._RunStage(build_stages.RegenPortageCacheStage)
+    self._RunStage(test_stages.BinhostTestStage)
+    self._RunStage(test_stages.BranchUtilTestStage)
+    self._RunStage(artifact_stages.MasterUploadPrebuiltsStage)
+
+  def _RunDefaultTypeBuild(self):
+    """Runs through the stages of a non-special-type build."""
+    self._RunStage(build_stages.UprevStage)
+    self._RunStage(build_stages.InitSDKStage)
+    self._RunStage(build_stages.RegenPortageCacheStage)
+    self.RunSetupBoard()
+    self._RunStage(chrome_stages.SyncChromeStage)
+    self._RunStage(chrome_stages.PatchChromeStage)
+    self._RunStage(test_stages.BinhostTestStage)
+    self._RunStage(test_stages.BranchUtilTestStage)
+
+    # Prepare stages to run in background.  If child_configs exist then
+    # run each of those here, otherwise use default config.
+    builder_runs = self._run.GetUngroupedBuilderRuns()
+
+    tasks = []
+    for builder_run in builder_runs:
+      # Prepare a local archive directory for each "run".
+      builder_run.GetArchive().SetupArchivePath()
+
+      for board in builder_run.config.boards:
+        archive_stage = self._GetStageInstance(
+            artifact_stages.ArchiveStage, board, builder_run=builder_run,
+            chrome_version=self._run.attrs.chrome_version)
+        board_config = BoardConfig(board, builder_run.config.name)
+        self.archive_stages[board_config] = archive_stage
+        tasks.append((builder_run, board))
+
+    # Set up a process pool to run test/archive stages in the background.
+    # This process runs task(board) for each board added to the queue.
+    task_runner = self._RunBackgroundStagesForBoardAndMarkAsSuccessful
+    with parallel.BackgroundTaskRunner(task_runner) as queue:
+      for builder_run, board in tasks:
+        if not builder_run.config.build_packages_in_background:
+          # Run BuildPackages in the foreground, generating or using AFDO data
+          # if requested.
+          kwargs = {'builder_run': builder_run}
+          if builder_run.config.afdo_generate_min:
+            kwargs['afdo_generate_min'] = True
+          elif builder_run.config.afdo_use:
+            kwargs['afdo_use'] = True
+
+          self._RunStage(build_stages.BuildPackagesStage, board,
+                         update_metadata=True, **kwargs)
+
+          if (builder_run.config.afdo_generate_min and
+              afdo.CanGenerateAFDOData(board)):
+            # Generate the AFDO data before allowing any other tasks to run.
+            self._RunStage(build_stages.BuildImageStage, board, **kwargs)
+            self._RunStage(artifact_stages.UploadTestArtifactsStage, board,
+                           builder_run=builder_run,
+                           suffix='[afdo_generate_min]')
+            for suite in builder_run.config.hw_tests:
+              self._RunStage(test_stages.HWTestStage, board, suite,
+                             builder_run=builder_run)
+            self._RunStage(afdo_stages.AFDODataGenerateStage, board,
+                           builder_run=builder_run)
+
+          if (builder_run.config.afdo_generate_min and
+              builder_run.config.afdo_update_ebuild):
+            self._RunStage(afdo_stages.AFDOUpdateEbuildStage,
+                           builder_run=builder_run)
+
+        # Kick off our background stages.
+        queue.put([builder_run, board])
+
+  def RunStages(self):
+    """Runs through build process."""
+    # TODO(sosa): Split these out into classes.
+    if self._run.config.build_type == constants.PRE_CQ_LAUNCHER_TYPE:
+      self._RunStage(sync_stages.PreCQLauncherStage)
+    elif ((self._run.config.build_type == constants.PALADIN_TYPE or
+           self._run.config.build_type == constants.CHROME_PFQ_TYPE) and
+          self._run.config.master):
+      self._RunMasterPaladinOrChromePFQBuild()
+    else:
+      self._RunDefaultTypeBuild()
+
+
+class DistributedBuilder(SimpleBuilder):
+  """Build class that has special logic to handle distributed builds.
+
+  These builds sync using git/manifest logic in manifest_versions.  In general
+  they use a non-distributed builder code for the bulk of the work.
+  """
+
+  def __init__(self, *args, **kwargs):
+    """Initializes a buildbot builder.
+
+    Extra variables:
+      completion_stage_class:  Stage used to complete a build.  Set in the Sync
+        stage.
+    """
+    super(DistributedBuilder, self).__init__(*args, **kwargs)
+    self.completion_stage_class = None
+    self.sync_stage = None
+    self._completion_stage = None
+
+  def GetSyncInstance(self):
+    """Syncs the tree using one of the distributed sync logic paths.
+
+    Returns:
+      The instance of the sync stage to run.
+    """
+    # Determine sync class to use.  CQ overrides PFQ bits so should check it
+    # first.
+    if self._run.config.pre_cq:
+      sync_stage = self._GetStageInstance(sync_stages.PreCQSyncStage,
+                                          self.patch_pool.gerrit_patches)
+      self.completion_stage_class = completion_stages.PreCQCompletionStage
+      # NOTE(review): pool is cleared here, presumably because PreCQSyncStage
+      # owns the patches from this point on -- confirm against that stage.
+      self.patch_pool.gerrit_patches = []
+    elif config_lib.IsCQType(self._run.config.build_type):
+      if self._run.config.do_not_apply_cq_patches:
+        sync_stage = self._GetStageInstance(
+            sync_stages.MasterSlaveLKGMSyncStage)
+      else:
+        sync_stage = self._GetStageInstance(sync_stages.CommitQueueSyncStage)
+      self.completion_stage_class = completion_stages.CommitQueueCompletionStage
+    elif config_lib.IsPFQType(self._run.config.build_type):
+      sync_stage = self._GetStageInstance(sync_stages.MasterSlaveLKGMSyncStage)
+      self.completion_stage_class = (
+          completion_stages.MasterSlaveSyncCompletionStage)
+    elif config_lib.IsCanaryType(self._run.config.build_type):
+      sync_stage = self._GetStageInstance(
+          sync_stages.ManifestVersionedSyncStage)
+      self.completion_stage_class = (
+          completion_stages.CanaryCompletionStage)
+    else:
+      sync_stage = self._GetStageInstance(
+          sync_stages.ManifestVersionedSyncStage)
+      self.completion_stage_class = (
+          completion_stages.ManifestVersionedSyncCompletionStage)
+
+    self.sync_stage = sync_stage
+    return self.sync_stage
+
+  def GetCompletionInstance(self):
+    """Returns the completion_stage_class instance that was used for this build.
+
+    Returns:
+      None if the completion_stage instance was not yet created (this
+      occurs during Publish).
+    """
+    return self._completion_stage
+
+  def Publish(self, was_build_successful, build_finished):
+    """Completes build by publishing any required information.
+
+    Args:
+      was_build_successful: Whether the build succeeded.
+      build_finished: Whether the build completed. A build can be successful
+        without completing if it exits early with sys.exit(0).
+    """
+    completion_stage = self._GetStageInstance(self.completion_stage_class,
+                                              self.sync_stage,
+                                              was_build_successful)
+    self._completion_stage = completion_stage
+    completion_successful = False
+    try:
+      completion_stage.Run()
+      completion_successful = True
+      if (self._run.config.afdo_update_ebuild and
+          not self._run.config.afdo_generate_min):
+        self._RunStage(afdo_stages.AFDOUpdateEbuildStage)
+    finally:
+      # The publish stage always runs when push_overlays is set, even if the
+      # completion stage raised; |publish| is True only on full success.
+      if self._run.config.push_overlays:
+        publish = (was_build_successful and completion_successful and
+                   build_finished)
+        self._RunStage(completion_stages.PublishUprevChangesStage, publish)
+
+  def RunStages(self):
+    """Runs simple builder logic and publishes information to overlays."""
+    was_build_successful = False
+    build_finished = False
+    try:
+      super(DistributedBuilder, self).RunStages()
+      was_build_successful = results_lib.Results.BuildSucceededSoFar()
+      build_finished = True
+    except SystemExit as ex:
+      # If a stage calls sys.exit(0), it's exiting with success, so that means
+      # we should mark ourselves as successful.
+      logging.info('Detected sys.exit(%s)', ex.code)
+      if ex.code == 0:
+        was_build_successful = True
+      raise
+    finally:
+      # Publish runs even on failure so completion status is always reported.
+      self.Publish(was_build_successful, build_finished)
diff --git a/cbuildbot/builders/simple_builders_unittest b/cbuildbot/builders/simple_builders_unittest
new file mode 120000
index 0000000..ef3e37b
--- /dev/null
+++ b/cbuildbot/builders/simple_builders_unittest
@@ -0,0 +1 @@
+../../scripts/wrapper.py
\ No newline at end of file
diff --git a/cbuildbot/builders/simple_builders_unittest.py b/cbuildbot/builders/simple_builders_unittest.py
new file mode 100644
index 0000000..b013fd1
--- /dev/null
+++ b/cbuildbot/builders/simple_builders_unittest.py
@@ -0,0 +1,105 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for simpler builders."""
+
+from __future__ import print_function
+
+import copy
+import os
+
+from chromite.cbuildbot import cbuildbot_run
+from chromite.cbuildbot import chromeos_config
+from chromite.cbuildbot import constants
+from chromite.cbuildbot.builders import generic_builders
+from chromite.cbuildbot.builders import simple_builders
+from chromite.lib import cros_test_lib
+from chromite.lib import osutils
+from chromite.lib import parallel
+from chromite.scripts import cbuildbot
+
+
+# pylint: disable=protected-access
+
+
+class SimpleBuilderTest(cros_test_lib.MockTempDirTestCase):
+  """Tests for the main code paths in simple_builders.SimpleBuilder"""
+
+  def setUp(self):
+    self.buildroot = os.path.join(self.tempdir, 'buildroot')
+    chroot_path = os.path.join(self.buildroot, constants.DEFAULT_CHROOT_DIR)
+    osutils.SafeMakedirs(os.path.join(chroot_path, 'tmp'))
+
+    # Stub out stage execution so RunStages only exercises stage selection,
+    # not the stages themselves.
+    self.PatchObject(generic_builders.Builder, '_RunStage')
+    self.PatchObject(simple_builders.SimpleBuilder, '_RunParallelStages')
+    self.PatchObject(cbuildbot_run._BuilderRunBase, 'GetVersion',
+                     return_value='R32-1234.0.0')
+
+    self._manager = parallel.Manager()
+    self._manager.__enter__()
+
+  def tearDown(self):
+    # Mimic exiting a 'with' statement.
+    self._manager.__exit__(None, None, None)
+
+  def _initConfig(self, bot_id, extra_argv=None):
+    """Return normal options/build_config for |bot_id|"""
+    site_config = chromeos_config.GetConfig()
+    build_config = copy.deepcopy(site_config[bot_id])
+    # Force non-master/non-important so tests don't take master-only paths.
+    build_config['master'] = False
+    build_config['important'] = False
+
+    # Use the cbuildbot parser to create properties and populate default values.
+    parser = cbuildbot._CreateParser()
+    argv = (['-r', self.buildroot, '--buildbot', '--debug', '--nochromesdk'] +
+            (extra_argv if extra_argv else []) + [bot_id])
+    options, _ = cbuildbot._ParseCommandLine(parser, argv)
+
+    # Yikes.
+    options.managed_chrome = build_config['sync_chrome']
+
+    return cbuildbot_run.BuilderRun(
+        options, site_config, build_config, self._manager)
+
+  def testRunStagesPreCQ(self):
+    """Verify RunStages for PRE_CQ_LAUNCHER_TYPE builders"""
+    builder_run = self._initConfig('pre-cq-launcher')
+    simple_builders.SimpleBuilder(builder_run).RunStages()
+
+  def testRunStagesBranchUtil(self):
+    """Verify RunStages for CREATE_BRANCH_TYPE builders"""
+    extra_argv = ['--branch-name', 'foo', '--version', '1234']
+    builder_run = self._initConfig(constants.BRANCH_UTIL_CONFIG,
+                                   extra_argv=extra_argv)
+    simple_builders.SimpleBuilder(builder_run).RunStages()
+
+  def testRunStagesChrootBuilder(self):
+    """Verify RunStages for CHROOT_BUILDER_TYPE builders"""
+    builder_run = self._initConfig('chromiumos-sdk')
+    simple_builders.SimpleBuilder(builder_run).RunStages()
+
+  def testRunStagesRefreshPackages(self):
+    """Verify RunStages for REFRESH_PACKAGES_TYPE builders"""
+    builder_run = self._initConfig('refresh-packages')
+    simple_builders.SimpleBuilder(builder_run).RunStages()
+
+  def testRunStagesDefaultBuild(self):
+    """Verify RunStages for standard board builders"""
+    builder_run = self._initConfig('x86-generic-full')
+    builder_run.attrs.chrome_version = 'TheChromeVersion'
+    simple_builders.SimpleBuilder(builder_run).RunStages()
+
+  def testRunStagesDefaultBuildCompileCheck(self):
+    """Verify RunStages for standard board builders (compile only)"""
+    extra_argv = ['--compilecheck']
+    builder_run = self._initConfig('x86-generic-full', extra_argv=extra_argv)
+    builder_run.attrs.chrome_version = 'TheChromeVersion'
+    simple_builders.SimpleBuilder(builder_run).RunStages()
+
+  def testRunStagesDefaultBuildHwTests(self):
+    """Verify RunStages for boards w/hwtests"""
+    extra_argv = ['--hwtest']
+    builder_run = self._initConfig('lumpy-release', extra_argv=extra_argv)
+    builder_run.attrs.chrome_version = 'TheChromeVersion'
+    simple_builders.SimpleBuilder(builder_run).RunStages()
diff --git a/cbuildbot/builders/test_builders.py b/cbuildbot/builders/test_builders.py
new file mode 100644
index 0000000..7a158e1
--- /dev/null
+++ b/cbuildbot/builders/test_builders.py
@@ -0,0 +1,36 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing builders intended for testing cbuildbot behaviors."""
+
+from __future__ import print_function
+
+from chromite.lib import cros_logging as logging
+
+from chromite.cbuildbot import manifest_version
+from chromite.cbuildbot.builders import generic_builders
+from chromite.cbuildbot.stages import generic_stages
+from chromite.cbuildbot.stages import sync_stages
+
+
+class SuccessStage(generic_stages.BuilderStage):
+  """Build stage declares success!"""
+  def Run(self):
+    # Log-only stage; exists to verify stage plumbing in tests.
+    logging.info('!!!SuccessStage, FTW!!!')
+
+
+class ManifestVersionedSyncBuilder(generic_builders.Builder):
+  """Builder that performs sync, then exits."""
+
+  def GetVersionInfo(self):
+    """Returns the CrOS version info from the chromiumos-overlay."""
+    return manifest_version.VersionInfo.from_repo(self._run.buildroot)
+
+  def GetSyncInstance(self):
+    """Returns an instance of a SyncStage that should be run."""
+    return self._GetStageInstance(sync_stages.ManifestVersionedSyncStage)
+
+  def RunStages(self):
+    """Run something after sync/reexec."""
+    # Only the trivial SuccessStage runs post-sync; this builder exists to
+    # exercise the sync path itself.
+    self._RunStage(SuccessStage)
diff --git a/cbuildbot/cbuildbot b/cbuildbot/cbuildbot
new file mode 120000
index 0000000..a179d39
--- /dev/null
+++ b/cbuildbot/cbuildbot
@@ -0,0 +1 @@
+cbuildbot.py
\ No newline at end of file
diff --git a/cbuildbot/cbuildbot.py b/cbuildbot/cbuildbot.py
new file mode 100755
index 0000000..41572b3
--- /dev/null
+++ b/cbuildbot/cbuildbot.py
@@ -0,0 +1,24 @@
+#!/usr/bin/python2
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# TODO(ferringb): remove this as soon as depot_tools is gutted of its
+# import logic, and is just a re-exec.
+
+"""Dynamic wrapper to invoke cbuildbot with standardized import paths."""
+
+from __future__ import print_function
+
+import os
+import sys
+
+def main():
+  # Bypass all of chromite_wrappers attempted 'help', and execve to the actual
+  # cbuildbot wrapper/helper chromite has.
+  location = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+  location = os.path.join(location, 'bin', 'cbuildbot')
+  # os.execv replaces the current process image; nothing after this returns.
+  os.execv(location, [location] + sys.argv[1:])
+
+if __name__ == '__main__':
+  main()
diff --git a/cbuildbot/cbuildbot_run.py b/cbuildbot/cbuildbot_run.py
new file mode 100644
index 0000000..ee2d67b
--- /dev/null
+++ b/cbuildbot/cbuildbot_run.py
@@ -0,0 +1,937 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Provide a class for collecting info on one builder run.
+
+There are two public classes, BuilderRun and ChildBuilderRun, that serve
+this function.  The first is for most situations, the second is for "child"
+configs within a builder config that has entries in "child_configs".
+
+Almost all functionality is within the common _BuilderRunBase class.  The
+only thing the BuilderRun and ChildBuilderRun classes are responsible for
+is overriding the self.config value in the _BuilderRunBase object whenever
+it is accessed.
+
+It is important to note that for one overall run, there will be one
+BuilderRun object and zero or more ChildBuilderRun objects, but they
+will all share the same _BuilderRunBase *object*.  This means, for example,
+that run attributes (e.g. self.attrs.release_tag) are shared between them
+all, as intended.
+"""
+
+from __future__ import print_function
+
+import cPickle
+import functools
+import os
+try:
+  import Queue
+except ImportError:
+  # Python-3 renamed to "queue".  We still use Queue to avoid collisions
+  # with naming variables as "queue".  Maybe we'll transition at some point.
+  # pylint: disable=F0401
+  import queue as Queue
+import types
+
+from chromite.cbuildbot import archive_lib
+from chromite.cbuildbot import metadata_lib
+from chromite.cbuildbot import constants
+from chromite.cbuildbot import tree_status
+from chromite.lib import cidb
+from chromite.lib import portage_util
+
+
+class RunAttributesError(Exception):
+  """Base class for exceptions related to RunAttributes behavior."""
+
+  def __str__(self):
+    """Handle stringify because base class will just spit out self.args."""
+    # NOTE(review): self.msg is only set by subclasses' __init__ methods;
+    # stringifying a bare RunAttributesError would raise AttributeError.
+    return self.msg
+
+
+# Deliberately derives from RuntimeError (not RunAttributesError); raised by
+# _BuilderRunBase.GetVersionInfo when attrs.version_info is missing.
+class VersionNotSetError(RuntimeError):
+  """Error raised if trying to access version_info before it's set."""
+
+
+class ParallelAttributeError(AttributeError):
+  """Custom version of AttributeError."""
+
+  def __init__(self, attr, board=None, target=None, *args):
+    # Two message forms: board/target-specific when either is given,
+    # otherwise a generic parallel-attribute message.
+    if board or target:
+      self.msg = ('No such board-specific parallel run attribute %r for %s/%s' %
+                  (attr, board, target))
+    else:
+      self.msg = 'No such parallel run attribute %r' % attr
+    super(ParallelAttributeError, self).__init__(self.msg, *args)
+    # Keep structured args (attr, board, target, ...) for callers/pickling.
+    self.args = (attr, board, target) + tuple(args)
+
+  def __str__(self):
+    return self.msg
+
+
+class AttrSepCountError(ValueError):
+  """Custom version of ValueError for when BOARD_ATTR_SEP is misused."""
+  def __init__(self, attr, *args):
+    self.msg = ('Attribute name has an unexpected number of "%s" occurrences'
+                ' in it: %s' % (RunAttributes.BOARD_ATTR_SEP, attr))
+    super(AttrSepCountError, self).__init__(self.msg, *args)
+    # Preserve the offending attribute name in self.args for inspection.
+    self.args = (attr, ) + tuple(args)
+
+  def __str__(self):
+    return self.msg
+
+
+class AttrNotPickleableError(RunAttributesError):
+  """For when attribute value to queue is not pickleable."""
+
+  def __init__(self, attr, value, *args):
+    # Raised by RunAttributes.SetParallel when cPickle.dumps fails; parallel
+    # attribute values must survive a multiprocessing queue round-trip.
+    self.msg = 'Run attribute "%s" value cannot be pickled: %r' % (attr, value)
+    super(AttrNotPickleableError, self).__init__(self.msg, *args)
+    self.args = (attr, value) + tuple(args)
+
+
+class AttrTimeoutError(RunAttributesError):
+  """For when timeout is reached while waiting for attribute value."""
+
+  def __init__(self, attr, *args):
+    # Raised by RunAttributes.GetParallel when a non-zero timeout expires.
+    self.msg = 'Timed out waiting for value for run attribute "%s".' % attr
+    super(AttrTimeoutError, self).__init__(self.msg, *args)
+    self.args = (attr, ) + tuple(args)
+
+
+class LockableQueue(object):
+  """Multiprocessing queue with associated recursive lock.
+
+  Objects of this class function just like a regular multiprocessing Queue,
+  except that there is also an rlock attribute for getting a multiprocessing
+  RLock associated with this queue.  Actual locking must still be handled by
+  the calling code.  Example usage:
+
+  with queue.rlock:
+    ... process the queue in some way.
+  """
+
+  def __init__(self, manager):
+    # Both the queue and the lock are manager proxies, so they can be
+    # shared across processes.
+    self._queue = manager.Queue()
+    self.rlock = manager.RLock()
+
+  def __getattr__(self, attr):
+    """Relay everything to the underlying Queue object at self._queue."""
+    # Only called for attributes not found normally (e.g. get/put/empty),
+    # so self.rlock and self._queue themselves are never delegated.
+    return getattr(self._queue, attr)
+
+
+class RunAttributes(object):
+  """Hold all run attributes for a particular builder run.
+
+  There are two supported flavors of run attributes: REGULAR attributes are
+  available only to stages that are run sequentially as part of the main (top)
+  process and PARALLEL attributes are available to all stages, no matter what
+  process they are in.  REGULAR attributes are accessed directly as normal
+  attributes on a RunAttributes object, while PARALLEL attributes are accessed
+  through the {Set|Has|Get}Parallel methods.  PARALLEL attributes also have the
+  restriction that their values must be pickle-able (in order to be sent
+  through multiprocessing queue).
+
+  The currently supported attributes of each kind are listed in REGULAR_ATTRS
+  and PARALLEL_ATTRS below.  To add support for a new run attribute simply
+  add it to one of those sets.
+
+  A subset of PARALLEL_ATTRS is BOARD_ATTRS.  These attributes only have meaning
+  in the context of a specific board and config target.  The attributes become
+  available once a board/config is registered for a run, and then they can be
+  accessed through the {Set|Has|Get}BoardParallel methods or through the
+  {Get|Set|Has}Parallel methods of a BoardRunAttributes object.  The latter is
+  encouraged.
+
+  To add a new BOARD attribute simply add it to the BOARD_ATTRS set below, which
+  will also add it to PARALLEL_ATTRS (all BOARD attributes are assumed to need
+  PARALLEL support).
+  """
+
+  REGULAR_ATTRS = frozenset((
+      'chrome_version',   # Set by SyncChromeStage, if it runs.
+      'manifest_manager', # Set by ManifestVersionedSyncStage.
+      'release_tag',      # Set by cbuildbot after sync stage.
+      'version_info',     # Set by the builder after sync+patch stage.
+      'metadata',         # Used by various build stages to record metadata.
+  ))
+
+  # TODO(mtennant): It might be useful to have additional info for each board
+  # attribute:  1) a log-friendly pretty name, 2) a rough upper bound timeout
+  # value for consumers of the attribute to use when waiting for it.
+  BOARD_ATTRS = frozenset((
+      'breakpad_symbols_generated', # Set by DebugSymbolsStage.
+      'debug_tarball_generated',    # Set by DebugSymbolsStage.
+      'images_generated',           # Set by BuildImageStage.
+      'payloads_generated',         # Set by UploadHWTestArtifacts.
+      'delta_payloads_generated',   # Set by UploadHWTestArtifacts.
+      'instruction_urls_per_channel', # Set by ArchiveStage
+      'success',                    # Set by cbuildbot.py:Builder
+      'packages_under_test',        # Set by BuildPackagesStage.
+      'gce_tarball_generated',       # Set by ArchiveStage.
+  ))
+
+  # Attributes that need to be set by stages that can run in parallel
+  # (i.e. in a subprocess) must be included here.  All BOARD_ATTRS are
+  # assumed to fit into this category.
+  PARALLEL_ATTRS = BOARD_ATTRS | frozenset((
+      'unittest_value',   # For unittests.  An example of a PARALLEL attribute
+                          # that is not also a BOARD attribute.
+  ))
+
+  # This separator is used to create a unique attribute name for any
+  # board-specific attribute.  For example:
+  # breakpad_symbols_generated||stumpy||stumpy-full-config
+  BOARD_ATTR_SEP = '||'
+
+  # Sanity check, make sure there is no overlap between the attr groups.
+  assert not REGULAR_ATTRS & PARALLEL_ATTRS
+
+  # REGULAR_ATTRS show up as attributes directly on the RunAttributes object.
+  __slots__ = tuple(REGULAR_ATTRS) + (
+      '_board_targets', # Set of registered board/target combinations.
+      '_manager',       # The multiprocessing.Manager to use.
+      '_queues',        # Dict of parallel attribute names to LockableQueues.
+  )
+
+  def __init__(self, multiprocess_manager):
+    # The __slots__ logic above confuses pylint.
+    # https://bitbucket.org/logilab/pylint/issue/380/
+    # pylint: disable=assigning-non-slot
+
+    # Create queues for all non-board-specific parallel attributes now.
+    # Parallel board attributes must wait for the board to be registered.
+    self._manager = multiprocess_manager
+    self._queues = {}
+    for attr in RunAttributes.PARALLEL_ATTRS:
+      if attr not in RunAttributes.BOARD_ATTRS:
+        # pylint: disable=E1101
+        self._queues[attr] = LockableQueue(self._manager)
+
+    # Set of known <board>||<target> combinations.
+    self._board_targets = set()
+
+  def RegisterBoardAttrs(self, board, target):
+    """Register a new valid board/target combination.  Safe to repeat.
+
+    Args:
+      board: Board name to register.
+      target: Build config name to register.
+
+    Returns:
+      A new BoardRunAttributes object for more convenient access to the newly
+        registered attributes specific to this board/target combination.
+    """
+    board_target = RunAttributes.BOARD_ATTR_SEP.join((board, target))
+
+    if not board_target in self._board_targets:
+      # Register board/target as a known board/target.
+      self._board_targets.add(board_target)
+
+      # For each board attribute that should be queue-able, create its queue
+      # now.  Queues are kept by the uniquified run attribute name.
+      for attr in RunAttributes.BOARD_ATTRS:
+        # Every attr in BOARD_ATTRS is in PARALLEL_ATTRS, by construction.
+        # pylint: disable=E1101
+        uniquified_attr = self._GetBoardAttrName(attr, board, target)
+        self._queues[uniquified_attr] = LockableQueue(self._manager)
+
+    # A fresh wrapper is returned even for an already-registered combination.
+    return BoardRunAttributes(self, board, target)
+
+  # TODO(mtennant): Complain if a child process attempts to set a non-parallel
+  # run attribute?  It could be done something like this:
+  #def __setattr__(self, attr, value):
+  #  """Override __setattr__ to prevent misuse of run attributes."""
+  #  if attr in self.REGULAR_ATTRS:
+  #    assert not self._IsChildProcess()
+  #  super(RunAttributes, self).__setattr__(attr, value)
+
+  def _GetBoardAttrName(self, attr, board, target):
+    """Translate plain |attr| to uniquified board attribute name.
+
+    Args:
+      attr: Plain run attribute name.
+      board: Board name.
+      target: Build config name.
+
+    Returns:
+      The uniquified board-specific attribute name.
+
+    Raises:
+      AssertionError if the board/target combination does not exist.
+    """
+    board_target = RunAttributes.BOARD_ATTR_SEP.join((board, target))
+    assert board_target in self._board_targets, \
+        'Unknown board/target combination: %s/%s' % (board, target)
+
+    # Translate to the unique attribute name for attr/board/target.
+    return RunAttributes.BOARD_ATTR_SEP.join((attr, board, target))
+
+  def SetBoardParallel(self, attr, value, board, target):
+    """Set board-specific parallel run attribute value.
+
+    Args:
+      attr: Plain board run attribute name.
+      value: Value to set.
+      board: Board name.
+      target: Build config name.
+    """
+    unique_attr = self._GetBoardAttrName(attr, board, target)
+    try:
+      self.SetParallel(unique_attr, value)
+    except ParallelAttributeError:
+      # Clarify the AttributeError.
+      # Re-raise with the plain attr name plus board/target context, rather
+      # than leaking the internal uniquified name to the caller.
+      raise ParallelAttributeError(attr, board=board, target=target)
+
+  def HasBoardParallel(self, attr, board, target):
+    """Return True if board-specific parallel run attribute is known and set.
+
+    Args:
+      attr: Plain board run attribute name.
+      board: Board name.
+      target: Build config name.
+    """
+    unique_attr = self._GetBoardAttrName(attr, board, target)
+    return self.HasParallel(unique_attr)
+
+  def SetBoardParallelDefault(self, attr, default_value, board, target):
+    """Set board-specific parallel run attribute value, if not already set.
+
+    Args:
+      attr: Plain board run attribute name.
+      default_value: Value to set.
+      board: Board name.
+      target: Build config name.
+    """
+    if not self.HasBoardParallel(attr, board, target):
+      self.SetBoardParallel(attr, default_value, board, target)
+
+  def GetBoardParallel(self, attr, board, target, timeout=0):
+    """Get board-specific parallel run attribute value.
+
+    Args:
+      attr: Plain board run attribute name.
+      board: Board name.
+      target: Build config name.
+      timeout: See GetParallel for description.
+
+    Returns:
+      The value found.
+    """
+    unique_attr = self._GetBoardAttrName(attr, board, target)
+    try:
+      return self.GetParallel(unique_attr, timeout=timeout)
+    except ParallelAttributeError:
+      # Clarify the AttributeError.
+      raise ParallelAttributeError(attr, board=board, target=target)
+
+  def _GetQueue(self, attr, strict=False):
+    """Return the queue for the given attribute, if it exists.
+
+    Args:
+      attr: The run attribute name.
+      strict: If True, then complain if queue for |attr| is not found.
+
+    Returns:
+      The LockableQueue for this attribute, if it has one, or None
+        (assuming strict is False).
+
+    Raises:
+      ParallelAttributeError if no queue for this attribute is registered,
+        meaning no parallel attribute by this name is known.
+    """
+    queue = self._queues.get(attr)
+
+    if queue is None and strict:
+      raise ParallelAttributeError(attr)
+
+    return queue
+
+  def SetParallel(self, attr, value):
+    """Set the given parallel run attribute value.
+
+    Called to set the value of any parallel run attribute.  The value is
+    saved onto a multiprocessing queue for that attribute.
+
+    Args:
+      attr: Name of the attribute.
+      value: Value to give the attribute.  This value must be pickleable.
+
+    Raises:
+      ParallelAttributeError if attribute is not a valid parallel attribute.
+      AttrNotPickleableError if value cannot be pickled, meaning it cannot
+        go through the queue system.
+    """
+    # Confirm that value can be pickled, because otherwise it will fail
+    # in the queue.
+    try:
+      cPickle.dumps(value, cPickle.HIGHEST_PROTOCOL)
+    except cPickle.PicklingError:
+      raise AttrNotPickleableError(attr, value)
+
+    queue = self._GetQueue(attr, strict=True)
+
+    with queue.rlock:
+      # First empty the queue.  Any value already on the queue is now stale.
+      # The queue thus holds at most one element: the latest value set.
+      while True:
+        try:
+          queue.get(False)
+        except Queue.Empty:
+          break
+
+      queue.put(value)
+
+  def HasParallel(self, attr):
+    """Return True if the given parallel run attribute is known and set.
+
+    Args:
+      attr: Name of the attribute.
+    """
+    try:
+      queue = self._GetQueue(attr, strict=True)
+
+      with queue.rlock:
+        return not queue.empty()
+    except ParallelAttributeError:
+      # An unknown attribute is simply reported as "not set".
+      return False
+
+  def SetParallelDefault(self, attr, default_value):
+    """Set the given parallel run attribute only if it is not already set.
+
+    This leverages HasParallel and SetParallel in a convenient pattern.
+
+    Args:
+      attr: Name of the attribute.
+      default_value: Value to give the attribute if it is not set.  This value
+        must be pickleable.
+
+    Raises:
+      ParallelAttributeError if attribute is not a valid parallel attribute.
+      AttrNotPickleableError if value cannot be pickled, meaning it cannot
+        go through the queue system.
+    """
+    if not self.HasParallel(attr):
+      self.SetParallel(attr, default_value)
+
+  # TODO(mtennant): Add an option to log access, including the time to wait
+  # or waited.  It could be enabled with an optional announce=False argument.
+  # See GetParallel helper on BoardSpecificBuilderStage class for ideas.
+  def GetParallel(self, attr, timeout=0):
+    """Get value for the given parallel run attribute, optionally waiting.
+
+    If the given parallel run attr already has a value in the queue it will
+    return that value right away.  Otherwise, it will wait for a value to
+    appear in the queue up to the timeout specified (timeout of None means
+    wait forever) before returning the value found or raising AttrTimeoutError
+    if a timeout was reached.
+
+    Args:
+      attr: The name of the run attribute.
+      timeout: Timeout, in seconds.  A None value means wait forever,
+        which is probably never a good idea.  A value of 0 does not wait at all.
+
+    Raises:
+      ParallelAttributeError if attribute is not set and timeout was 0.
+      AttrTimeoutError if timeout is greater than 0 and timeout is reached
+        before a value is available on the queue.
+    """
+    got_value = False
+    queue = self._GetQueue(attr, strict=True)
+
+    # First attempt to get a value off the queue, without the lock.  This
+    # allows a blocking get to wait for a value to appear.
+    try:
+      value = queue.get(True, timeout)
+      got_value = True
+    except Queue.Empty:
+      # This means there is nothing on the queue.  Let this fall through to
+      # the locked code block to see if another process is in the process
+      # of re-queuing a value.  Any process doing that will have a lock.
+      pass
+
+    # Now grab the queue lock and flush any other values that are on the queue.
+    # This should only happen if another process put a value in after our first
+    # queue.get above.  If so, accept the updated value.
+    with queue.rlock:
+      while True:
+        try:
+          value = queue.get(False)
+          got_value = True
+        except Queue.Empty:
+          break
+
+      if got_value:
+        # First re-queue the value, then return it.
+        # Re-queuing keeps the value available for subsequent readers, since
+        # queue.get above consumed it.
+        queue.put(value)
+        return value
+
+      else:
+        # Handle no value differently depending on whether timeout is 0.
+        if timeout == 0:
+          raise ParallelAttributeError(attr)
+        else:
+          raise AttrTimeoutError(attr)
+
+
+class BoardRunAttributes(object):
+  """Convenience class for accessing board-specific run attributes.
+
+  Board-specific run attributes (actually board/target-specific) are saved in
+  the RunAttributes object but under uniquified names.  A BoardRunAttributes
+  object provides access to these attributes using their plain names by
+  providing the board/target information where needed.
+
+  For example, to access the breakpad_symbols_generated board run attribute on
+  a regular RunAttributes object requires this:
+
+    value = attrs.GetBoardParallel('breakpad_symbols_generated', board, target)
+
+  But on a BoardRunAttributes object:
+
+    boardattrs = BoardRunAttributes(attrs, board, target)
+    ...
+    value = boardattrs.GetParallel('breakpad_symbols_generated')
+
+  The same goes for setting values.
+  """
+
+  __slots__ = ('_attrs', '_board', '_target')
+
+  def __init__(self, attrs, board, target):
+    """Initialize.
+
+    Args:
+      attrs: The main RunAttributes object.
+      board: The board name this is specific to.
+      target: The build config name this is specific to.
+    """
+    # This object holds no state of its own; it only curries board/target
+    # into calls on the shared RunAttributes object.
+    self._attrs = attrs
+    self._board = board
+    self._target = target
+
+  def SetParallel(self, attr, value, *args, **kwargs):
+    """Set the value of parallel board attribute |attr| to |value|.
+
+    Relay to SetBoardParallel on self._attrs, supplying board and target.
+    See documentation on RunAttributes.SetBoardParallel for more details.
+    """
+    self._attrs.SetBoardParallel(attr, value, self._board, self._target,
+                                 *args, **kwargs)
+
+  def HasParallel(self, attr, *args, **kwargs):
+    """Return True if parallel board attribute |attr| exists.
+
+    Relay to HasBoardParallel on self._attrs, supplying board and target.
+    See documentation on RunAttributes.HasBoardParallel for more details.
+    """
+    return self._attrs.HasBoardParallel(attr, self._board, self._target,
+                                        *args, **kwargs)
+
+  def SetParallelDefault(self, attr, default_value, *args, **kwargs):
+    """Set the value of parallel board attribute |attr| to |value|, if not set.
+
+    Relay to SetBoardParallelDefault on self._attrs, supplying board and target.
+    See documentation on RunAttributes.SetBoardParallelDefault for more details.
+    """
+    self._attrs.SetBoardParallelDefault(attr, default_value, self._board,
+                                        self._target, *args, **kwargs)
+
+  def GetParallel(self, attr, *args, **kwargs):
+    """Get the value of parallel board attribute |attr|.
+
+    Relay to GetBoardParallel on self._attrs, supplying board and target.
+    See documentation on RunAttributes.GetBoardParallel for more details.
+    """
+    return self._attrs.GetBoardParallel(attr, self._board, self._target,
+                                        *args, **kwargs)
+
+
+# TODO(mtennant): Consider renaming this _BuilderRunState, then renaming
+# _RealBuilderRun to _BuilderRunBase.
+class _BuilderRunBase(object):
+  """Class to represent one run of a builder.
+
+  This class should never be instantiated directly, but instead be
+  instantiated as part of a BuilderRun object.
+  """
+
+  # Class-level dict of RunAttributes objects to make it less
+  # problematic to send BuilderRun objects between processes through
+  # pickle.  The 'attrs' attribute on a BuilderRun object will look
+  # up the RunAttributes for that particular BuilderRun here.
+  _ATTRS = {}
+
+  __slots__ = (
+      'site_config',     # SiteConfig for this run.
+      'config',          # BuildConfig for this run.
+      'options',         # The cbuildbot options object for this run.
+
+      # Run attributes set/accessed by stages during the run.  To add support
+      # for a new run attribute add it to the RunAttributes class above.
+      '_attrs_id',       # Object ID for looking up self.attrs.
+
+      # Some pre-computed run configuration values.
+      'buildnumber',     # The build number for this run.
+      'buildroot',       # The build root path for this run.
+      'debug',           # Boolean, represents "dry run" concept, really.
+      'manifest_branch', # The manifest branch to build and test for this run.
+
+      # Some attributes are available as properties.  In particular, attributes
+      # that use self.config must be determined after __init__.
+      # self.bot_id      # Effective name of builder for this run.
+
+      # TODO(mtennant): Other candidates here include:
+      # trybot, buildbot, remote_trybot, chrome_root,
+      # test = (config build_tests AND option tests)
+  )
+
+  def __init__(self, site_config, options, multiprocess_manager):
+    self.site_config = site_config
+    self.options = options
+
+    # Note that self.config is filled in dynamically by either of the classes
+    # that are actually instantiated: BuilderRun and ChildBuilderRun.  In other
+    # words, self.config can be counted on anywhere except in this __init__.
+    # The implication is that any plain attributes that are calculated from
+    # self.config contents must be provided as properties (or methods).
+    # See the _RealBuilderRun class and its __getattr__ method for details.
+    self.config = None
+
+    # Create the RunAttributes object for this BuilderRun and save
+    # the id number for it in order to look it up via attrs property.
+    # Storing only the id (not the object) keeps this instance pickleable.
+    attrs = RunAttributes(multiprocess_manager)
+    self._ATTRS[id(attrs)] = attrs
+    self._attrs_id = id(attrs)
+
+    # Fill in values for all pre-computed "run configs" now, which are frozen
+    # by this time.
+
+    # TODO(mtennant): Should this use os.path.abspath like builderstage does?
+    self.buildroot = self.options.buildroot
+    self.buildnumber = self.options.buildnumber
+    self.manifest_branch = self.options.branch
+
+    # For remote_trybot runs, options.debug is implied, but we want true dryrun
+    # mode only if --debug was actually specified (i.e. options.debug_forced).
+    # TODO(mtennant): Get rid of confusing debug and debug_forced, if at all
+    # possible.  Also, eventually use "dry_run" and "verbose" options instead to
+    # represent two distinct concepts.
+    self.debug = self.options.debug
+    if self.options.remote_trybot:
+      self.debug = self.options.debug_forced
+
+    # The __slots__ logic above confuses pylint.
+    # https://bitbucket.org/logilab/pylint/issue/380/
+    # pylint: disable=assigning-non-slot
+
+    # Certain run attributes have sensible defaults which can be set here.
+    # This allows all code to safely assume that the run attribute exists.
+    attrs.chrome_version = None
+    attrs.metadata = metadata_lib.CBuildbotMetadata(
+        multiprocess_manager=multiprocess_manager)
+
+  @property
+  def bot_id(self):
+    """Return the bot_id for this run."""
+    # Depends on self.config, so must be a property (see __init__ note).
+    return self.config.GetBotId(remote_trybot=self.options.remote_trybot)
+
+  @property
+  def attrs(self):
+    """Look up the RunAttributes object for this BuilderRun object."""
+    return self._ATTRS[self._attrs_id]
+
+  def IsToTBuild(self):
+    """Returns True if Builder is running on ToT."""
+    return self.manifest_branch == 'master'
+
+  def GetArchive(self):
+    """Create an Archive object for this BuilderRun object."""
+    # The Archive class is very lightweight, and is read-only, so it
+    # is ok to generate a new one on demand.  This also avoids worrying
+    # about whether it can go through pickle.
+    # Almost everything the Archive class does requires GetVersion(),
+    # which means it cannot be used until the version has been settled on.
+    # However, because it does have some use before then we provide
+    # the GetVersion function itself to be called when needed later.
+    return archive_lib.Archive(self.bot_id, self.GetVersion, self.options,
+                               self.config)
+
+  def GetBoardRunAttrs(self, board):
+    """Create a BoardRunAttributes object for this run and given |board|."""
+    return BoardRunAttributes(self.attrs, board, self.config.name)
+
+  def GetWaterfall(self):
+    """Gets the waterfall of the current build."""
+    # Metadata dictionary may not have been written at this time (it
+    # should be written in the BuildStartStage), fall back to get the
+    # environment variable in that case. Assume we are on the trybot
+    # waterfall if no waterfall can be found.
+    return (self.attrs.metadata.GetDict().get('buildbot-master-name') or
+            os.environ.get('BUILDBOT_MASTERNAME') or
+            constants.WATERFALL_TRYBOT)
+
+  def GetBuildbotUrl(self):
+    """Gets the URL of the waterfall hosting the current build."""
+    # Metadata dictionary may not have been written at this time (it
+    # should be written in the BuildStartStage), fall back to the
+    # environment variable in that case. Assume we are on the trybot
+    # waterfall if no waterfall can be found.
+    return (self.attrs.metadata.GetDict().get('buildbot-url') or
+            os.environ.get('BUILDBOT_BUILDBOTURL') or
+            constants.TRYBOT_DASHBOARD)
+
+  def GetBuilderName(self):
+    """Get the name of this builder on the current waterfall."""
+    # Falls back to the config name when not running under buildbot.
+    return os.environ.get('BUILDBOT_BUILDERNAME', self.config.name)
+
+  def ConstructDashboardURL(self, stage=None):
+    """Return the dashboard URL
+
+    This is the direct link to buildbot logs as seen in build.chromium.org
+
+    Args:
+      stage: Link to a specific |stage|, otherwise the general buildbot log
+
+    Returns:
+      The fully formed URL
+    """
+    return tree_status.ConstructDashboardURL(
+        self.GetBuildbotUrl(),
+        self.GetBuilderName(),
+        self.options.buildnumber, stage=stage)
+
+  def ShouldBuildAutotest(self):
+    """Return True if this run should build autotest and artifacts."""
+    return self.config.build_tests and self.options.tests
+
+  def ShouldUploadPrebuilts(self):
+    """Return True if this run should upload prebuilts."""
+    return self.options.prebuilts and self.config.prebuilts
+
+  def GetCIDBHandle(self):
+    """Get the build_id and cidb handle, if available.
+
+    Returns:
+      A (build_id, CIDBConnection) tuple if cidb is set up and a build_id is
+      known in metadata. Otherwise, (None, None).
+    """
+    try:
+      build_id = self.attrs.metadata.GetValue('build_id')
+    except KeyError:
+      return (None, None)
+
+    if not cidb.CIDBConnectionFactory.IsCIDBSetup():
+      return (None, None)
+
+    cidb_handle = cidb.CIDBConnectionFactory.GetCIDBConnectionForBuilder()
+    if cidb_handle:
+      return (build_id, cidb_handle)
+    else:
+      return (None, None)
+
+  def ShouldReexecAfterSync(self):
+    """Return True if this run should re-exec itself after sync stage."""
+    if self.options.postsync_reexec and self.config.postsync_reexec:
+      # Return True if this source is not in designated buildroot.
+      abs_buildroot = os.path.abspath(self.buildroot)
+      return not os.path.abspath(__file__).startswith(abs_buildroot)
+
+    return False
+
+  def ShouldPatchAfterSync(self):
+    """Return True if this run should patch changes after sync stage."""
+    return self.options.postsync_patch and self.config.postsync_patch
+
+  def InProduction(self):
+    """Return True if this is a production run."""
+    return cidb.CIDBConnectionFactory.GetCIDBConnectionType() == 'prod'
+
+  def GetVersionInfo(self):
+    """Helper for picking apart various version bits.
+
+    The Builder must set attrs.version_info before calling this.  Further, it
+    should do so only after the sources have been fully synced & patched, else
+    it could return a confusing value.
+
+    Returns:
+      A manifest_version.VersionInfo object.
+
+    Raises:
+      VersionNotSetError if the version has not yet been set.
+    """
+    if not hasattr(self.attrs, 'version_info'):
+      raise VersionNotSetError('builder must call SetVersionInfo first')
+    return self.attrs.version_info
+
+  def GetVersion(self):
+    """Calculate full R<chrome_version>-<chromeos_version> version string.
+
+    See GetVersionInfo() notes about runtime usage.
+
+    Returns:
+      The version string for this run.
+    """
+    verinfo = self.GetVersionInfo()
+    release_tag = self.attrs.release_tag
+    if release_tag:
+      calc_version = 'R%s-%s' % (verinfo.chrome_branch, release_tag)
+    else:
+      # Non-versioned builds need the build number to uniquify the image.
+      calc_version = 'R%s-%s-b%s' % (verinfo.chrome_branch,
+                                     verinfo.VersionString(),
+                                     self.buildnumber)
+
+    return calc_version
+
+  def DetermineChromeVersion(self):
+    """Determine the current Chrome version in buildroot now and return it.
+
+    This uses the typical portage logic to determine which version of Chrome
+    is active right now in the buildroot.
+
+    Returns:
+      The new value of attrs.chrome_version (e.g. "35.0.1863.0").
+    """
+    cpv = portage_util.BestVisible(constants.CHROME_CP,
+                                   buildroot=self.buildroot)
+    # Strip any trailing "_<suffix>" (e.g. "_rc") from the unrevisioned
+    # version to get the plain Chrome version string.
+    return cpv.version_no_rev.partition('_')[0]
+
+
+class _RealBuilderRun(object):
+  """Base BuilderRun class that manages self.config access.
+
+  For any builder run, sometimes the build config is the top-level config and
+  sometimes it is a "child" config.  In either case, the config to use should
+  override self.config for all cases.  This class provides a mechanism for
+  overriding self.config access generally.
+
+  Also, methods that do more than access state for a BuilderRun should
+  live here.  In particular, any method that uses 'self' as an object
+  directly should be here rather than _BuilderRunBase.
+  """
+
+  __slots__ = _BuilderRunBase.__slots__ + (
+      '_run_base',  # The _BuilderRunBase object where most functionality is.
+      '_config',    # Config to use for dynamically overriding self.config.
+  )
+
+  def __init__(self, run_base, build_config):
+    self._run_base = run_base
+    self._config = build_config
+
+    # Make sure self.attrs has board-specific attributes for each board
+    # in build_config.
+    for board in build_config.boards:
+      self.attrs.RegisterBoardAttrs(board, build_config.name)
+
+  def __getattr__(self, attr):
+    # Remember, __getattr__ only called if attribute was not found normally.
+    # In normal usage, the __init__ guarantees that self._run_base and
+    # self._config will be present.  However, the unpickle process bypasses
+    # __init__, and this object must be pickle-able.  That is why we access
+    # self._run_base and self._config through __getattribute__ here, otherwise
+    # unpickling results in infinite recursion.
+    # TODO(mtennant): Revisit this if pickling support is changed to go through
+    # the __init__ method, such as by supplying __reduce__ method.
+    run_base = self.__getattribute__('_run_base')
+    config = self.__getattribute__('_config')
+
+    # TODO(akeshet): This logic seems to have a subtle flaky bug that only
+    # manifests itself when using unit tests with ParallelMock. As a workaround,
+    # we have simply eliminated ParallelMock from the affected tests. See
+    # crbug.com/470907 for context.
+    try:
+      # run_base.config should always be None except when accessed through
+      # this routine.  Override the value here, then undo later.
+      run_base.config = config
+
+      result = getattr(run_base, attr)
+      if isinstance(result, types.MethodType):
+        # Make sure run_base.config is also managed when the method is called.
+        @functools.wraps(result)
+        def FuncWrapper(*args, **kwargs):
+          run_base.config = config
+          try:
+            return result(*args, **kwargs)
+          finally:
+            run_base.config = None
+
+        # TODO(mtennant): Find a way to make the following actually work.  It
+        # makes pickling more complicated, unfortunately.
+        # Cache this function wrapper to re-use next time without going through
+        # __getattr__ again.  This ensures that the same wrapper object is used
+        # each time, which is nice for identity and equality checks.  Subtle
+        # gotcha that we accept: if the function itself on run_base is replaced
+        # then this will continue to provide the behavior of the previous one.
+        #setattr(self, attr, FuncWrapper)
+
+        return FuncWrapper
+      else:
+        return result
+
+    finally:
+      run_base.config = None
+
+  def GetChildren(self):
+    """Get ChildBuilderRun objects for child configs, if they exist.
+
+    Returns:
+      List of ChildBuilderRun objects if self.config has child_configs.  []
+        otherwise.
+    """
+    # If there are child configs, construct a list of ChildBuilderRun objects
+    # for those child configs and return that.
+    return [ChildBuilderRun(self, ix)
+            for ix in range(len(self.config.child_configs))]
+
+  def GetUngroupedBuilderRuns(self):
+    """Same as GetChildren, but defaults to [self] if no children exist.
+
+    Returns:
+      Result of self.GetChildren, if children exist, otherwise [self].
+    """
+    return self.GetChildren() or [self]
+
+  def GetBuilderIds(self):
+    """Return a list of builder names for this config and the child configs."""
+    bot_ids = [self.config.name]
+    for config in self.config.child_configs:
+      if config.name:
+        bot_ids.append(config.name)
+    return bot_ids
+
+
+class BuilderRun(_RealBuilderRun):
+  """A standard BuilderRun for a top-level build config."""
+
+  def __init__(self, options, site_config, build_config, multiprocess_manager):
+    """Initialize.
+
+    Args:
+      options: Command line options from this cbuildbot run.
+      site_config: Site config for this cbuildbot run.
+      build_config: Build config for this cbuildbot run.
+      multiprocess_manager: A multiprocessing.Manager.
+    """
+    run_base = _BuilderRunBase(site_config, options, multiprocess_manager)
+    super(BuilderRun, self).__init__(run_base, build_config)
+
+
+class ChildBuilderRun(_RealBuilderRun):
+  """A BuilderRun for a "child" build config."""
+
+  def __init__(self, builder_run, child_index):
+    """Initialize.
+
+    Args:
+      builder_run: BuilderRun for the parent (main) cbuildbot run.  Extract
+        the _BuilderRunBase from it to make sure the same base is used for
+        both the main cbuildbot run and any child runs.
+      child_index: The child index of this child run, used to index into
+        the main run's config.child_configs.
+    """
+    # pylint: disable=W0212
+    run_base = builder_run._run_base
+    config = builder_run.config.child_configs[child_index]
+    super(ChildBuilderRun, self).__init__(run_base, config)
diff --git a/cbuildbot/cbuildbot_run_unittest b/cbuildbot/cbuildbot_run_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/cbuildbot/cbuildbot_run_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/cbuildbot/cbuildbot_run_unittest.py b/cbuildbot/cbuildbot_run_unittest.py
new file mode 100644
index 0000000..e399c5d
--- /dev/null
+++ b/cbuildbot/cbuildbot_run_unittest.py
@@ -0,0 +1,650 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Test the cbuildbot_run module."""
+
+from __future__ import print_function
+
+import cPickle
+import os
+import mock
+import time
+
+from chromite.cbuildbot import chromeos_config
+from chromite.cbuildbot import cbuildbot_run
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import config_lib_unittest
+from chromite.lib import cros_test_lib
+from chromite.lib import parallel
+
+
+DEFAULT_ARCHIVE_GS_PATH = 'bogus_bucket/TheArchiveBase'
+DEFAULT_ARCHIVE_BASE = 'gs://%s' % DEFAULT_ARCHIVE_GS_PATH
+DEFAULT_BUILDROOT = '/tmp/foo/bar/buildroot'
+DEFAULT_BUILDNUMBER = 12345
+DEFAULT_BRANCH = 'TheBranch'
+DEFAULT_CHROME_BRANCH = 'TheChromeBranch'
+DEFAULT_VERSION_STRING = 'TheVersionString'
+DEFAULT_BOARD = 'TheBoard'
+DEFAULT_BOT_NAME = 'TheCoolBot'
+
+# pylint: disable=protected-access
+
+DEFAULT_OPTIONS = cros_test_lib.EasyAttr(
+    archive_base=DEFAULT_ARCHIVE_BASE,
+    buildroot=DEFAULT_BUILDROOT,
+    buildnumber=DEFAULT_BUILDNUMBER,
+    buildbot=True,
+    branch=DEFAULT_BRANCH,
+    remote_trybot=False,
+    debug=False,
+    postsync_patch=True,
+)
+DEFAULT_CONFIG = config_lib.BuildConfig(
+    name=DEFAULT_BOT_NAME,
+    master=True,
+    boards=[DEFAULT_BOARD],
+    postsync_patch=True,
+    child_configs=[
+        config_lib.BuildConfig(
+            name='foo', postsync_patch=False, boards=[]),
+        config_lib.BuildConfig(
+            name='bar', postsync_patch=False, boards=[]),
+    ],
+)
+
+DEFAULT_VERSION = '6543.2.1'
+
+
+def _ExtendDefaultOptions(**kwargs):
+  """Extend DEFAULT_OPTIONS with keys/values in kwargs."""
+  options_kwargs = DEFAULT_OPTIONS.copy()
+  options_kwargs.update(kwargs)
+  return cros_test_lib.EasyAttr(**options_kwargs)
+
+
+def _ExtendDefaultConfig(**kwargs):
+  """Extend DEFAULT_CONFIG with keys/values in kwargs."""
+  config_kwargs = DEFAULT_CONFIG.copy()
+  config_kwargs.update(kwargs)
+  return config_lib.BuildConfig(**config_kwargs)
+
+
+class ExceptionsTest(cros_test_lib.TestCase):
+  """Test that the exceptions in the module are sane."""
+
+  def _TestException(self, err, expected_startswith):
+    """Test that str and pickle behavior of |err| are as expected."""
+    err2 = cPickle.loads(cPickle.dumps(err, cPickle.HIGHEST_PROTOCOL))
+
+    self.assertTrue(str(err).startswith(expected_startswith))
+    self.assertEqual(str(err), str(err2))
+
+  def testParallelAttributeError(self):
+    """Test ParallelAttributeError message and pickle behavior."""
+    err1 = cbuildbot_run.ParallelAttributeError('SomeAttr')
+    self._TestException(err1, 'No such parallel run attribute')
+
+    err2 = cbuildbot_run.ParallelAttributeError('SomeAttr', 'SomeBoard',
+                                                'SomeTarget')
+    self._TestException(err2, 'No such board-specific parallel run attribute')
+
+  def testAttrSepCountError(self):
+    """Test AttrSepCountError message and pickle behavior."""
+    err1 = cbuildbot_run.AttrSepCountError('SomeAttr')
+    self._TestException(err1, 'Attribute name has an unexpected number')
+
+  def testAttrNotPickleableError(self):
+    """Test AttrNotPickleableError message and pickle behavior."""
+    err1 = cbuildbot_run.AttrNotPickleableError('SomeAttr', 'SomeValue')
+    self._TestException(err1, 'Run attribute "SomeAttr" value cannot')
+
+
+# TODO(mtennant): Turn this into a PartialMock.
+class _BuilderRunTestCase(cros_test_lib.MockTestCase):
+  """Provide methods for creating BuilderRun or ChildBuilderRun."""
+
+  def setUp(self):
+    self._manager = parallel.Manager()
+
+    # Mimic entering a 'with' statement.
+    self._manager.__enter__()
+
+  def tearDown(self):
+    # Mimic exiting a 'with' statement.
+    self._manager.__exit__(None, None, None)
+
+  def _NewRunAttributes(self):
+    return cbuildbot_run.RunAttributes(self._manager)
+
+  def _NewBuilderRun(self, options=None, config=None):
+    """Create a BuilderRun object from options and config values.
+
+    Args:
+      options: Specify options or default to DEFAULT_OPTIONS.
+      config: Specify build config or default to DEFAULT_CONFIG.
+
+    Returns:
+      BuilderRun object.
+    """
+    options = options or DEFAULT_OPTIONS
+    config = config or DEFAULT_CONFIG
+    site_config = config_lib_unittest.MockSiteConfig()
+    site_config[config.name] = config
+
+    return cbuildbot_run.BuilderRun(options, site_config, config, self._manager)
+
+  def _NewChildBuilderRun(self, child_index, options=None, config=None):
+    """Create a ChildBuilderRun object from options and config values.
+
+    Args:
+      child_index: Index of child config to use within config.
+      options: Specify options or default to DEFAULT_OPTIONS.
+      config: Specify build config or default to DEFAULT_CONFIG.
+
+    Returns:
+      ChildBuilderRun object.
+    """
+    run = self._NewBuilderRun(options, config)
+    return cbuildbot_run.ChildBuilderRun(run, child_index)
+
+
+class BuilderRunPickleTest(_BuilderRunTestCase):
+  """Make sure BuilderRun objects can be pickled."""
+
+  def setUp(self):
+    self.real_config = chromeos_config.GetConfig()['x86-alex-release-group']
+    self.PatchObject(cbuildbot_run._BuilderRunBase, 'GetVersion',
+                     return_value=DEFAULT_VERSION)
+
+  def _TestPickle(self, run1):
+    self.assertEquals(DEFAULT_VERSION, run1.GetVersion())
+    run1.attrs.release_tag = 'TheReleaseTag'
+
+    # Accessing a method on BuilderRun has special behavior, so access and
+    # use one before pickling.
+    patch_after_sync = run1.ShouldPatchAfterSync()
+
+    # Access the archive object before pickling, too.
+    upload_url = run1.GetArchive().upload_url
+
+    # Pickle and unpickle run1 into run2.
+    run2 = cPickle.loads(cPickle.dumps(run1, cPickle.HIGHEST_PROTOCOL))
+
+    self.assertEquals(run1.buildnumber, run2.buildnumber)
+    self.assertEquals(run1.config.boards, run2.config.boards)
+    self.assertEquals(run1.options.branch, run2.options.branch)
+    self.assertEquals(run1.attrs.release_tag, run2.attrs.release_tag)
+    self.assertRaises(AttributeError, getattr, run1.attrs, 'manifest_manager')
+    self.assertRaises(AttributeError, getattr, run2.attrs, 'manifest_manager')
+    self.assertEquals(patch_after_sync, run2.ShouldPatchAfterSync())
+    self.assertEquals(upload_url, run2.GetArchive().upload_url)
+
+    # The attrs objects should be identical.
+    self.assertIs(run1.attrs, run2.attrs)
+
+    # And the run objects themselves are different.
+    self.assertIsNot(run1, run2)
+
+  def testPickleBuilderRun(self):
+    self._TestPickle(self._NewBuilderRun(config=self.real_config))
+
+  def testPickleChildBuilderRun(self):
+    self._TestPickle(self._NewChildBuilderRun(0, config=self.real_config))
+
+
+class BuilderRunTest(_BuilderRunTestCase):
+  """Test the BuilderRun class."""
+
+  def testInit(self):
+    with mock.patch.object(cbuildbot_run._BuilderRunBase, 'GetVersion') as m:
+      m.return_value = DEFAULT_VERSION
+
+      run = self._NewBuilderRun()
+      self.assertEquals(DEFAULT_BUILDROOT, run.buildroot)
+      self.assertEquals(DEFAULT_BUILDNUMBER, run.buildnumber)
+      self.assertEquals(DEFAULT_BRANCH, run.manifest_branch)
+      self.assertEquals(DEFAULT_OPTIONS, run.options)
+      self.assertEquals(DEFAULT_CONFIG, run.config)
+      self.assertTrue(isinstance(run.attrs, cbuildbot_run.RunAttributes))
+      self.assertTrue(isinstance(run.GetArchive(),
+                                 cbuildbot_run.archive_lib.Archive))
+
+      # Make sure methods behave normally, since BuilderRun messes with them.
+      meth1 = run.GetVersionInfo
+      meth2 = run.GetVersionInfo
+      self.assertEqual(meth1.__name__, meth2.__name__)
+
+      # We actually do not support identity and equality checks right now.
+      self.assertNotEqual(meth1, meth2)
+      self.assertIsNot(meth1, meth2)
+
+  def testOptions(self):
+    options = _ExtendDefaultOptions(foo=True, bar=10)
+    run = self._NewBuilderRun(options=options)
+
+    self.assertEquals(True, run.options.foo)
+    self.assertEquals(10, run.options.__getattr__('bar'))
+    self.assertRaises(AttributeError, run.options.__getattr__, 'baz')
+
+  def testConfig(self):
+    config = _ExtendDefaultConfig(foo=True, bar=10)
+    run = self._NewBuilderRun(config=config)
+
+    self.assertEquals(True, run.config.foo)
+    self.assertEquals(10, run.config.__getattr__('bar'))
+    self.assertRaises(AttributeError, run.config.__getattr__, 'baz')
+
+  def testAttrs(self):
+    run = self._NewBuilderRun()
+
+    # manifest_manager is a valid run attribute.  It gives Attribute error
+    # if accessed before being set, but thereafter works fine.
+    self.assertRaises(AttributeError, run.attrs.__getattribute__,
+                      'manifest_manager')
+    run.attrs.manifest_manager = 'foo'
+    self.assertEquals('foo', run.attrs.manifest_manager)
+    self.assertEquals('foo', run.attrs.__getattribute__('manifest_manager'))
+
+    # foobar is not a valid run attribute.  It gives AttributeError when
+    # accessed or changed.
+    self.assertRaises(AttributeError, run.attrs.__getattribute__, 'foobar')
+    self.assertRaises(AttributeError, run.attrs.__setattr__, 'foobar', 'foo')
+
+  def testArchive(self):
+    run = self._NewBuilderRun()
+
+    with mock.patch.object(cbuildbot_run._BuilderRunBase, 'GetVersion') as m:
+      m.return_value = DEFAULT_VERSION
+
+      archive = run.GetArchive()
+
+      # Check archive.archive_path.
+      expected = ('%s/%s/%s/%s' %
+                  (DEFAULT_BUILDROOT,
+                   cbuildbot_run.archive_lib.Archive._BUILDBOT_ARCHIVE,
+                   DEFAULT_BOT_NAME, DEFAULT_VERSION))
+      self.assertEqual(expected, archive.archive_path)
+
+      # Check archive.upload_url.
+      expected = '%s/%s/%s' % (DEFAULT_ARCHIVE_BASE, DEFAULT_BOT_NAME,
+                               DEFAULT_VERSION)
+      self.assertEqual(expected, archive.upload_url)
+
+      # Check archive.download_url.
+      expected = ('%s%s/%s/%s' %
+                  (cbuildbot_run.archive_lib.gs.PRIVATE_BASE_HTTPS_URL,
+                   DEFAULT_ARCHIVE_GS_PATH, DEFAULT_BOT_NAME, DEFAULT_VERSION))
+      self.assertEqual(expected, archive.download_url)
+
+  def _RunAccessor(self, method_name, options_dict, config_dict):
+    """Run the given accessor method of the BuilderRun class.
+
+    Create a BuilderRun object with the options and config provided and
+    then return the result of calling the given method on it.
+
+    Args:
+      method_name: A BuilderRun method to call, specified by name.
+      options_dict: Extend default options with this.
+      config_dict: Extend default config with this.
+
+    Returns:
+      Result of calling the given method.
+    """
+    options = _ExtendDefaultOptions(**options_dict)
+    config = _ExtendDefaultConfig(**config_dict)
+    run = self._NewBuilderRun(options=options, config=config)
+    method = getattr(run, method_name)
+    self.assertEqual(method.__name__, method_name)
+    return method()
+
+  def testDualEnableSetting(self):
+    settings = {
+        'prebuilts': 'ShouldUploadPrebuilts',
+        'postsync_patch': 'ShouldPatchAfterSync',
+    }
+
+    # Both option and config enabled should result in True.
+    # Create truth table with two variables in this order:
+    # <key> option value, <key> config value (e.g. <key> == 'prebuilts').
+    truth_table = cros_test_lib.TruthTable(inputs=[(True, True)])
+
+    for inputs in truth_table:
+      option_val, config_val = inputs
+      for key, accessor in settings.iteritems():
+        self.assertEquals(
+            self._RunAccessor(accessor, {key: option_val}, {key: config_val}),
+            truth_table.GetOutput(inputs))
+
+  def testShouldReexecAfterSync(self):
+    # If option and config have postsync_reexec enabled, and this file is not
+    # in the build root, then we expect ShouldReexecAfterSync to return True.
+
+    # Construct a truth table across three variables in this order:
+    # postsync_reexec option value, postsync_reexec config value, same_root.
+    truth_table = cros_test_lib.TruthTable(inputs=[(True, True, False)])
+
+    for inputs in truth_table:
+      option_val, config_val, same_root = inputs
+
+      if same_root:
+        build_root = os.path.dirname(os.path.dirname(__file__))
+      else:
+        build_root = DEFAULT_BUILDROOT
+
+      result = self._RunAccessor(
+          'ShouldReexecAfterSync',
+          {'postsync_reexec': option_val, 'buildroot': build_root},
+          {'postsync_reexec': config_val})
+
+      self.assertEquals(result, truth_table.GetOutput(inputs))
+
+
+class GetVersionTest(_BuilderRunTestCase):
+  """Test the GetVersion and GetVersionInfo methods of BuilderRun class."""
+
+  # pylint: disable=protected-access
+
+  def testGetVersionInfoNotSet(self):
+    """Verify we throw an error when the version hasn't been set."""
+    run = self._NewBuilderRun()
+    self.assertRaises(RuntimeError, run.GetVersionInfo)
+
+  def testGetVersionInfo(self):
+    """Verify we return the right version info value."""
+    # Prepare a real BuilderRun object with a version_info tag.
+    run = self._NewBuilderRun()
+    verinfo = object()
+    run.attrs.version_info = verinfo
+    result = run.GetVersionInfo()
+    self.assertEquals(verinfo, result)
+
+  def _TestGetVersionReleaseTag(self, release_tag):
+    with mock.patch.object(cbuildbot_run._BuilderRunBase,
+                           'GetVersionInfo') as m:
+      verinfo_mock = mock.Mock()
+      verinfo_mock.chrome_branch = DEFAULT_CHROME_BRANCH
+      verinfo_mock.VersionString = mock.Mock(return_value='VS')
+      m.return_value = verinfo_mock
+
+      # Prepare a real BuilderRun object with a release tag.
+      run = self._NewBuilderRun()
+      run.attrs.release_tag = release_tag
+
+      # Run the test return the result.
+      result = run.GetVersion()
+      m.assert_called_once_with()
+      if release_tag is None:
+        verinfo_mock.VersionString.assert_called_once()
+
+      return result
+
+  def testGetVersionReleaseTag(self):
+    result = self._TestGetVersionReleaseTag('RT')
+    self.assertEquals('R%s-%s' % (DEFAULT_CHROME_BRANCH, 'RT'), result)
+
+  def testGetVersionNoReleaseTag(self):
+    result = self._TestGetVersionReleaseTag(None)
+    expected_result = ('R%s-%s-b%s' %
+                       (DEFAULT_CHROME_BRANCH, 'VS', DEFAULT_BUILDNUMBER))
+    self.assertEquals(result, expected_result)
+
+
+class ChildBuilderRunTest(_BuilderRunTestCase):
+  """Test the ChildBuilderRun class"""
+
+  def testInit(self):
+    with mock.patch.object(cbuildbot_run._BuilderRunBase, 'GetVersion') as m:
+      m.return_value = DEFAULT_VERSION
+
+      crun = self._NewChildBuilderRun(0)
+      self.assertEquals(DEFAULT_BUILDROOT, crun.buildroot)
+      self.assertEquals(DEFAULT_BUILDNUMBER, crun.buildnumber)
+      self.assertEquals(DEFAULT_BRANCH, crun.manifest_branch)
+      self.assertEquals(DEFAULT_OPTIONS, crun.options)
+      self.assertEquals(DEFAULT_CONFIG.child_configs[0], crun.config)
+      self.assertEquals('foo', crun.config.name)
+      self.assertTrue(isinstance(crun.attrs, cbuildbot_run.RunAttributes))
+      self.assertTrue(isinstance(crun.GetArchive(),
+                                 cbuildbot_run.archive_lib.Archive))
+
+      # Make sure methods behave normally, since BuilderRun messes with them.
+      meth1 = crun.GetVersionInfo
+      meth2 = crun.GetVersionInfo
+      self.assertEqual(meth1.__name__, meth2.__name__)
+
+      # We actually do not support identity and equality checks right now.
+      self.assertNotEqual(meth1, meth2)
+      self.assertIsNot(meth1, meth2)
+
+
+class RunAttributesTest(_BuilderRunTestCase):
+  """Test the RunAttributes class."""
+
+  BOARD = 'SomeBoard'
+  TARGET = 'SomeConfigName'
+  VALUE = 'AnyValueWillDo'
+
+  # Any valid board-specific attribute will work here.
+  BATTR = 'breakpad_symbols_generated'
+
+  def testRegisterBoardTarget(self):
+    """Test behavior of attributes before and after registering board target."""
+    ra = self._NewRunAttributes()
+
+    with self.assertRaises(AssertionError):
+      ra.HasBoardParallel(self.BATTR, self.BOARD, self.TARGET)
+
+    ra.RegisterBoardAttrs(self.BOARD, self.TARGET)
+
+    self.assertFalse(ra.HasBoardParallel(self.BATTR, self.BOARD, self.TARGET))
+
+    ra.SetBoardParallel(self.BATTR, 'TheValue', self.BOARD, self.TARGET)
+
+    self.assertTrue(ra.HasBoardParallel(self.BATTR, self.BOARD, self.TARGET))
+
+  def testSetGet(self):
+    """Test simple set/get of regular and parallel run attributes."""
+    ra = self._NewRunAttributes()
+    value = 'foobar'
+
+    # The __slots__ logic above confuses pylint.
+    # https://bitbucket.org/logilab/pylint/issue/380/
+    # pylint: disable=assigning-non-slot
+
+    # Set/Get a regular run attribute using direct access.
+    ra.release_tag = value
+    self.assertEqual(value, ra.release_tag)
+
+    # Set/Get of a parallel run attribute using direct access fails.
+    self.assertRaises(AttributeError, setattr, ra, 'unittest_value', value)
+    self.assertRaises(AttributeError, getattr, ra, 'unittest_value')
+
+    # Set/Get of a parallel run attribute with supported interface.
+    ra.SetParallel('unittest_value', value)
+    self.assertEqual(value, ra.GetParallel('unittest_value'))
+
+    # Set/Get a board parallel run attribute, testing both the encouraged
+    # interface and the underlying interface.
+    ra.RegisterBoardAttrs(self.BOARD, self.TARGET)
+    ra.SetBoardParallel(self.BATTR, value, self.BOARD, self.TARGET)
+    self.assertEqual(value,
+                     ra.GetBoardParallel(self.BATTR, self.BOARD, self.TARGET))
+
+  def testSetDefault(self):
+    """Test setting default value of parallel run attributes."""
+    ra = self._NewRunAttributes()
+    value = 'foobar'
+
+    # Attribute starts off not set.
+    self.assertFalse(ra.HasParallel('unittest_value'))
+
+    # Use SetParallelDefault to set it.
+    ra.SetParallelDefault('unittest_value', value)
+    self.assertTrue(ra.HasParallel('unittest_value'))
+    self.assertEqual(value, ra.GetParallel('unittest_value'))
+
+    # Calling SetParallelDefault again has no effect.
+    ra.SetParallelDefault('unittest_value', 'junk')
+    self.assertTrue(ra.HasParallel('unittest_value'))
+    self.assertEqual(value, ra.GetParallel('unittest_value'))
+
+    # Run through same sequence for a board-specific attribute.
+    with self.assertRaises(AssertionError):
+      ra.HasBoardParallel(self.BATTR, self.BOARD, self.TARGET)
+    ra.RegisterBoardAttrs(self.BOARD, self.TARGET)
+    self.assertFalse(ra.HasBoardParallel(self.BATTR, self.BOARD, self.TARGET))
+
+    # Use SetBoardParallelDefault to set it.
+    ra.SetBoardParallelDefault(self.BATTR, value, self.BOARD, self.TARGET)
+    self.assertTrue(ra.HasBoardParallel(self.BATTR, self.BOARD, self.TARGET))
+    self.assertEqual(value,
+                     ra.GetBoardParallel(self.BATTR, self.BOARD, self.TARGET))
+
+    # Calling SetBoardParallelDefault again has no effect.
+    ra.SetBoardParallelDefault(self.BATTR, 'junk', self.BOARD, self.TARGET)
+    self.assertTrue(ra.HasBoardParallel(self.BATTR, self.BOARD, self.TARGET))
+    self.assertEqual(value,
+                     ra.GetBoardParallel(self.BATTR, self.BOARD, self.TARGET))
+
+  def testAttributeError(self):
+    """Test accessing run attributes that do not exist."""
+    ra = self._NewRunAttributes()
+    value = 'foobar'
+
+    # Set/Get on made up attribute name.
+    self.assertRaises(AttributeError, setattr, ra, 'foo', value)
+    self.assertRaises(AttributeError, getattr, ra, 'foo')
+
+    # A board/target value is valid, but only if it is registered first.
+    self.assertRaises(AssertionError, ra.GetBoardParallel,
+                      self.BATTR, self.BOARD, self.TARGET)
+    ra.RegisterBoardAttrs(self.BOARD, self.TARGET)
+    self.assertRaises(AttributeError, ra.GetBoardParallel,
+                      self.BATTR, self.BOARD, self.TARGET)
+
+
+class BoardRunAttributesTest(_BuilderRunTestCase):
+  """Test the BoardRunAttributes class."""
+
+  BOARD = 'SomeBoard'
+  TARGET = 'SomeConfigName'
+  VALUE = 'AnyValueWillDo'
+
+  # Any valid board-specific attribute will work here.
+  BATTR = 'breakpad_symbols_generated'
+
+  class _SetAttr(object):
+    """Stage-like class to set attr on a BoardRunAttributes obj."""
+    def __init__(self, bra, attr, value, delay=1):
+      self.bra = bra
+      self.attr = attr
+      self.value = value
+      self.delay = delay
+
+    def Run(self):
+      if self.delay:
+        time.sleep(self.delay)
+      self.bra.SetParallel(self.attr, self.value)
+
+  class _WaitForAttr(object):
+    """Stage-like class to wait for attr on BoardRunAttributes obj."""
+    def __init__(self, bra, attr, expected_value, timeout=10):
+      self.bra = bra
+      self.attr = attr
+      self.expected_value = expected_value
+      self.timeout = timeout
+
+    def GetParallel(self):
+      return self.bra.GetParallel(self.attr, timeout=self.timeout)
+
+  class _CheckWaitForAttr(_WaitForAttr):
+    """Stage-like class to wait for then check attr on BoardRunAttributes."""
+    def Run(self):
+      value = self.GetParallel()
+      assert value == self.expected_value, \
+          ('For run attribute %s expected value %r but got %r.' %
+           (self.attr, self.expected_value, value))
+
+  class _TimeoutWaitForAttr(_WaitForAttr):
+    """Stage-like class to time-out waiting for attr on BoardRunAttributes."""
+    def Run(self):
+      try:
+        self.GetParallel()
+        assert False, 'Expected AttrTimeoutError'
+      except cbuildbot_run.AttrTimeoutError:
+        pass
+
+  def setUp(self):
+    self.ra = self._NewRunAttributes()
+    self.bra = self.ra.RegisterBoardAttrs(self.BOARD, self.TARGET)
+
+  def _TestParallelSetGet(self, stage_args):
+    """Helper to run "stages" in parallel, according to |stage_args|.
+
+    Args:
+      stage_args: List of tuples of the form (stage_object, extra_args, ...)
+        where stage_object has a Run method which takes a BoardRunAttributes
+        object as the first argument and extra_args for the remaining arguments.
+    """
+    stages = [a[0](self.bra, *a[1:]) for a in stage_args]
+    steps = [stage.Run for stage in stages]
+
+    parallel.RunParallelSteps(steps)
+
+  def testParallelSetGetFast(self):
+    """Pass the parallel run attribute around with no delay."""
+    stage_args = [
+        (self._CheckWaitForAttr, self.BATTR, self.VALUE),
+        (self._SetAttr, self.BATTR, self.VALUE),
+    ]
+    self._TestParallelSetGet(stage_args)
+    self.assertRaises(AttributeError,
+                      getattr, self.bra, self.BATTR)
+    self.assertEqual(self.VALUE, self.bra.GetParallel(self.BATTR))
+
+  def testParallelSetGetSlow(self):
+    """Pass the parallel run attribute around with a delay."""
+    stage_args = [
+        (self._SetAttr, self.BATTR, self.VALUE, 10),
+        (self._TimeoutWaitForAttr, self.BATTR, self.VALUE, 2),
+    ]
+    self._TestParallelSetGet(stage_args)
+    self.assertEqual(self.VALUE, self.bra.GetParallel(self.BATTR))
+
+  def testParallelSetGetManyGets(self):
+    """Set the parallel run attribute in one stage, access in many stages."""
+    stage_args = [
+        (self._SetAttr, self.BATTR, self.VALUE, 8),
+        (self._CheckWaitForAttr, self.BATTR, self.VALUE, 16),
+        (self._CheckWaitForAttr, self.BATTR, self.VALUE, 16),
+        (self._CheckWaitForAttr, self.BATTR, self.VALUE, 16),
+        (self._TimeoutWaitForAttr, self.BATTR, self.VALUE, 1),
+    ]
+    self._TestParallelSetGet(stage_args)
+    self.assertEqual(self.VALUE, self.bra.GetParallel(self.BATTR))
+
+  def testParallelSetGetManySets(self):
+    """Set the parallel run attribute in many stages, access in one stage."""
+    # Three "stages" set the value, with increasing delays.  The stage that
+    # checks the value should get the first value set.
+    stage_args = [
+        (self._SetAttr, self.BATTR, self.VALUE + '1', 1),
+        (self._SetAttr, self.BATTR, self.VALUE + '2', 11),
+        (self._CheckWaitForAttr, self.BATTR, self.VALUE + '1', 12),
+    ]
+    self._TestParallelSetGet(stage_args)
+    self.assertEqual(self.VALUE + '2', self.bra.GetParallel(self.BATTR))
+
+  def testSetGet(self):
+    """Test that board-specific attrs do not work with set/get directly."""
+    self.assertRaises(AttributeError, setattr,
+                      self.bra, 'breakpad_symbols_generated', self.VALUE)
+    self.assertRaises(AttributeError, getattr,
+                      self.bra, 'breakpad_symbols_generated')
+
+  def testAccessRegularRunAttr(self):
+    """Test that regular attributes are not known to BoardRunAttributes."""
+    self.assertRaises(AttributeError, getattr, self.bra, 'release_tag')
+    self.assertRaises(AttributeError, setattr, self.bra, 'release_tag', 'foo')
diff --git a/cbuildbot/cbuildbot_unittest b/cbuildbot/cbuildbot_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/cbuildbot/cbuildbot_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/cbuildbot/cbuildbot_unittest.py b/cbuildbot/cbuildbot_unittest.py
new file mode 100644
index 0000000..c17bb78
--- /dev/null
+++ b/cbuildbot/cbuildbot_unittest.py
@@ -0,0 +1,500 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for the cbuildbot script."""
+
+from __future__ import print_function
+
+import argparse
+import glob
+import optparse
+import os
+
+from chromite.cbuildbot import cbuildbot_run
+from chromite.cbuildbot import commands
+from chromite.cbuildbot import config_lib_unittest
+from chromite.cbuildbot import constants
+from chromite.cbuildbot import manifest_version
+from chromite.cbuildbot.builders import simple_builders
+from chromite.lib import cidb
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_build_lib_unittest
+from chromite.lib import cros_test_lib
+from chromite.lib import osutils
+from chromite.lib import parallel
+from chromite.lib import partial_mock
+from chromite.scripts import cbuildbot
+
+
+# pylint: disable=protected-access
+
+
+class BuilderRunMock(partial_mock.PartialMock):
+  """Partial mock for BuilderRun class."""
+
+  TARGET = 'chromite.cbuildbot.cbuildbot_run._BuilderRunBase'
+  ATTRS = ('GetVersionInfo', 'DetermineChromeVersion',)
+
+  def __init__(self, verinfo):
+    super(BuilderRunMock, self).__init__()
+    self._version_info = verinfo
+
+  def GetVersionInfo(self, _inst):
+    """This way builders don't have to set the version from the overlay"""
+    return self._version_info
+
+  def DetermineChromeVersion(self, _inst):
+    """Normaly this runs a portage command to look at the chrome ebuild"""
+    return self._version_info.chrome_branch
+
+
+class SimpleBuilderTestCase(cros_test_lib.MockTestCase):
+  """Common stubs for SimpleBuilder tests."""
+
+  CHROME_BRANCH = '27'
+  VERSION = '1234.5.6'
+
+  def setUp(self):
+    verinfo = manifest_version.VersionInfo(
+        version_string=self.VERSION, chrome_branch=self.CHROME_BRANCH)
+
+    self.StartPatcher(BuilderRunMock(verinfo))
+
+    self.PatchObject(simple_builders.SimpleBuilder, 'GetVersionInfo',
+                     return_value=verinfo)
+
+
+class TestArgsparseError(Exception):
+  """Exception used by parser.error() mock to halt execution."""
+
+
+class TestHaltedException(Exception):
+  """Exception used by mocks to halt execution without indicating failure."""
+
+
+class RunBuildStagesTest(cros_build_lib_unittest.RunCommandTempDirTestCase,
+                         SimpleBuilderTestCase):
+  """Test that cbuildbot runs the appropriate stages for a given config."""
+
+  def setUp(self):
+    self.buildroot = os.path.join(self.tempdir, 'buildroot')
+    osutils.SafeMakedirs(self.buildroot)
+    # Always stub RunCommand out as we use it in every method.
+    self.site_config = config_lib_unittest.MockSiteConfig()
+    self.build_config = config_lib_unittest.MockBuildConfig()
+    self.bot_id = self.build_config.name
+    self.build_config['master'] = False
+    self.build_config['important'] = False
+
+    # Use the cbuildbot parser to create properties and populate default values.
+    self.parser = cbuildbot._CreateParser()
+
+    argv = ['-r', self.buildroot, '--buildbot', '--debug', self.bot_id]
+    self.options, _ = cbuildbot._ParseCommandLine(self.parser, argv)
+    self.options.bootstrap = False
+    self.options.clean = False
+    self.options.resume = False
+    self.options.sync = False
+    self.options.build = False
+    self.options.uprev = False
+    self.options.tests = False
+    self.options.archive = False
+    self.options.remote_test_status = False
+    self.options.patches = None
+    self.options.prebuilts = False
+
+    self._manager = parallel.Manager()
+    self._manager.__enter__()
+    self.run = cbuildbot_run.BuilderRun(self.options, self.site_config,
+                                        self.build_config, self._manager)
+
+    self.rc.AddCmdResult(
+        [constants.PATH_TO_CBUILDBOT, '--reexec-api-version'],
+        output=constants.REEXEC_API_VERSION)
+
+  def tearDown(self):
+    # Mimic exiting a 'with' statement.
+    if hasattr(self, '_manager'):
+      self._manager.__exit__(None, None, None)
+
+  def testChromeosOfficialSet(self):
+    """Verify that CHROMEOS_OFFICIAL is set correctly."""
+    self.build_config['chromeos_official'] = True
+
+    cidb.CIDBConnectionFactory.SetupNoCidb()
+
+    # Clean up before.
+    os.environ.pop('CHROMEOS_OFFICIAL', None)
+    simple_builders.SimpleBuilder(self.run).Run()
+    self.assertIn('CHROMEOS_OFFICIAL', os.environ)
+
+  def testChromeosOfficialNotSet(self):
+    """Verify that CHROMEOS_OFFICIAL is not always set."""
+    self.build_config['chromeos_official'] = False
+
+    cidb.CIDBConnectionFactory.SetupNoCidb()
+
+    # Clean up before.
+    os.environ.pop('CHROMEOS_OFFICIAL', None)
+    simple_builders.SimpleBuilder(self.run).Run()
+    self.assertNotIn('CHROMEOS_OFFICIAL', os.environ)
+
+
+class LogTest(cros_test_lib.TempDirTestCase):
+  """Test logging functionality."""
+
+  def _generateLogs(self, num):
+    """Generates cbuildbot.log and num backups."""
+    with open(os.path.join(self.tempdir, 'cbuildbot.log'), 'w') as f:
+      f.write(str(num + 1))
+
+    for i in range(1, num + 1):
+      with open(os.path.join(self.tempdir, 'cbuildbot.log.' + str(i)),
+                'w') as f:
+        f.write(str(i))
+
+  def testZeroToOneLogs(self):
+    """Test beginning corner case."""
+    self._generateLogs(0)
+    cbuildbot._BackupPreviousLog(os.path.join(self.tempdir, 'cbuildbot.log'),
+                                 backup_limit=25)
+    with open(os.path.join(self.tempdir, 'cbuildbot.log.1')) as f:
+      self.assertEquals(f.readline(), '1')
+
+  def testNineToTenLogs(self):
+    """Test handling *.log.9 to *.log.10 (correct sorting)."""
+    self._generateLogs(9)
+    cbuildbot._BackupPreviousLog(os.path.join(self.tempdir, 'cbuildbot.log'),
+                                 backup_limit=25)
+    with open(os.path.join(self.tempdir, 'cbuildbot.log.10')) as f:
+      self.assertEquals(f.readline(), '10')
+
+  def testOverLimit(self):
+    """Test going over the limit and having to purge old logs."""
+    self._generateLogs(25)
+    cbuildbot._BackupPreviousLog(os.path.join(self.tempdir, 'cbuildbot.log'),
+                                 backup_limit=25)
+    with open(os.path.join(self.tempdir, 'cbuildbot.log.26')) as f:
+      self.assertEquals(f.readline(), '26')
+
+    self.assertEquals(len(glob.glob(os.path.join(self.tempdir, 'cbuildbot*'))),
+                      25)
+
+
+class InterfaceTest(cros_test_lib.MockTestCase, cros_test_lib.LoggingTestCase):
+  """Test the command line interface."""
+
+  _X86_PREFLIGHT = 'x86-generic-paladin'
+  _BUILD_ROOT = '/b/test_build1'
+
+  def setUp(self):
+    self.parser = cbuildbot._CreateParser()
+    self.site_config = config_lib_unittest.MockSiteConfig()
+
+  def assertDieSysExit(self, *args, **kwargs):
+    self.assertRaises(cros_build_lib.DieSystemExit, *args, **kwargs)
+
+  def testDepotTools(self):
+    """Test that the entry point used by depot_tools works."""
+    path = os.path.join(constants.SOURCE_ROOT, 'chromite', 'bin', 'cbuildbot')
+
+    # Verify the tests below actually are testing correct behaviour;
+    # specifically that it doesn't always just return 0.
+    self.assertRaises(cros_build_lib.RunCommandError,
+                      cros_build_lib.RunCommand,
+                      ['cbuildbot', '--monkeys'], cwd=constants.SOURCE_ROOT)
+
+    # Validate depot_tools lookup.
+    cros_build_lib.RunCommand(
+        ['cbuildbot', '--help'], cwd=constants.SOURCE_ROOT, capture_output=True)
+
+    # Validate buildbot invocation pathway.
+    cros_build_lib.RunCommand(
+        [path, '--help'], cwd=constants.SOURCE_ROOT, capture_output=True)
+
+  def testDebugBuildBotSetByDefault(self):
+    """Test that debug and buildbot flags are set by default."""
+    args = ['--local', '-r', self._BUILD_ROOT, self._X86_PREFLIGHT]
+    options, args = cbuildbot._ParseCommandLine(self.parser, args)
+    self.assertTrue(options.debug)
+    self.assertFalse(options.buildbot)
+
+  def testBuildBotOption(self):
+    """Test that --buildbot option unsets debug flag."""
+    args = ['-r', self._BUILD_ROOT, '--buildbot', self._X86_PREFLIGHT]
+    options, args = cbuildbot._ParseCommandLine(self.parser, args)
+    self.assertFalse(options.debug)
+    self.assertTrue(options.buildbot)
+
+  def testBuildBotWithDebugOption(self):
+    """Test that --debug option overrides --buildbot option."""
+    args = ['-r', self._BUILD_ROOT, '--buildbot', '--debug',
+            self._X86_PREFLIGHT]
+    options, args = cbuildbot._ParseCommandLine(self.parser, args)
+    self.assertTrue(options.debug)
+    self.assertTrue(options.buildbot)
+
+  def testLocalTrybotWithSpacesInPatches(self):
+    """Test that we handle spaces in patch arguments."""
+    args = ['-r', self._BUILD_ROOT, '--remote', '--local-patches',
+            ' proj:br \t  proj2:b2 ',
+            self._X86_PREFLIGHT]
+    options, args = cbuildbot._ParseCommandLine(self.parser, args)
+    self.assertEquals(options.local_patches, ['proj:br', 'proj2:b2'])
+
+  def testBuildBotWithRemotePatches(self):
+    """Test that --buildbot errors out with patches."""
+    args = ['-r', self._BUILD_ROOT, '--buildbot', '-g', '1234',
+            self._X86_PREFLIGHT]
+    self.assertDieSysExit(cbuildbot._ParseCommandLine, self.parser, args)
+
+  def testRemoteBuildBotWithRemotePatches(self):
+    """Test that --buildbot and --remote errors out with patches."""
+    args = ['-r', self._BUILD_ROOT, '--buildbot', '--remote', '-g', '1234',
+            self._X86_PREFLIGHT]
+    self.assertDieSysExit(cbuildbot._ParseCommandLine, self.parser, args)
+
+  def testBuildbotDebugWithPatches(self):
+    """Test we can test patches with --buildbot --debug."""
+    args = ['--remote', '-g', '1234', '--debug', '--buildbot',
+            self._X86_PREFLIGHT]
+    cbuildbot._ParseCommandLine(self.parser, args)
+
+  def testBuildBotWithoutProfileOption(self):
+    """Test that no --profile option gets defaulted."""
+    args = ['--buildbot', self._X86_PREFLIGHT]
+    options, args = cbuildbot._ParseCommandLine(self.parser, args)
+    self.assertEquals(options.profile, None)
+
+  def testBuildBotWithProfileOption(self):
+    """Test that --profile option gets parsed."""
+    args = ['--buildbot', '--profile', 'carp', self._X86_PREFLIGHT]
+    options, args = cbuildbot._ParseCommandLine(self.parser, args)
+    self.assertEquals(options.profile, 'carp')
+
+  def testValidateClobberUserDeclines_1(self):
+    """Test case where user declines in prompt."""
+    self.PatchObject(os.path, 'exists', return_value=True)
+    self.PatchObject(cros_build_lib, 'GetInput', return_value='No')
+    self.assertFalse(commands.ValidateClobber(self._BUILD_ROOT))
+
+  def testValidateClobberUserDeclines_2(self):
+    """Test case where user does not enter the full 'yes' pattern."""
+    self.PatchObject(os.path, 'exists', return_value=True)
+    m = self.PatchObject(cros_build_lib, 'GetInput', side_effect=['asdf', 'No'])
+    self.assertFalse(commands.ValidateClobber(self._BUILD_ROOT))
+    self.assertEqual(m.call_count, 2)
+
+  def testValidateClobberProtectRunningChromite(self):
+    """User should not be clobbering our own source."""
+    cwd = os.path.dirname(os.path.realpath(__file__))
+    buildroot = os.path.dirname(cwd)
+    self.assertDieSysExit(commands.ValidateClobber, buildroot)
+
+  def testValidateClobberProtectRoot(self):
+    """User should not be clobbering /"""
+    self.assertDieSysExit(commands.ValidateClobber, '/')
+
+  def testBuildBotWithBadChromeRevOption(self):
+    """chrome_rev can't be passed an invalid option after chrome_root."""
+    args = [
+        '--local',
+        '--buildroot=/tmp',
+        '--chrome_root=.',
+        '--chrome_rev=%s' % constants.CHROME_REV_TOT,
+        self._X86_PREFLIGHT,
+    ]
+    self.assertDieSysExit(cbuildbot._ParseCommandLine, self.parser, args)
+
+  def testBuildBotWithBadChromeRootOption(self):
+    """chrome_root can't get passed after non-local chrome_rev."""
+    args = [
+        '--local',
+        '--buildroot=/tmp',
+        '--chrome_rev=%s' % constants.CHROME_REV_TOT,
+        '--chrome_root=.',
+        self._X86_PREFLIGHT,
+    ]
+    self.assertDieSysExit(cbuildbot._ParseCommandLine, self.parser, args)
+
+  def testBuildBotWithBadChromeRevOptionLocal(self):
+    """chrome_rev can't be local without chrome_root."""
+    args = [
+        '--local',
+        '--buildroot=/tmp',
+        '--chrome_rev=%s' % constants.CHROME_REV_LOCAL,
+        self._X86_PREFLIGHT,
+    ]
+    self.assertDieSysExit(cbuildbot._ParseCommandLine, self.parser, args)
+
+  def testBuildBotWithGoodChromeRootOption(self):
+    """chrome_root can be set without chrome_rev."""
+    args = [
+        '--local',
+        '--buildroot=/tmp',
+        '--chrome_root=.',
+        self._X86_PREFLIGHT,
+    ]
+    options, args = cbuildbot._ParseCommandLine(self.parser, args)
+    self.assertEquals(options.chrome_rev, constants.CHROME_REV_LOCAL)
+    self.assertNotEquals(options.chrome_root, None)
+
+  def testBuildBotWithGoodChromeRevAndRootOption(self):
+    """chrome_rev can get reset around chrome_root."""
+    args = [
+        '--local',
+        '--buildroot=/tmp',
+        '--chrome_rev=%s' % constants.CHROME_REV_LATEST,
+        '--chrome_rev=%s' % constants.CHROME_REV_STICKY,
+        '--chrome_rev=%s' % constants.CHROME_REV_TOT,
+        '--chrome_rev=%s' % constants.CHROME_REV_TOT,
+        '--chrome_rev=%s' % constants.CHROME_REV_STICKY,
+        '--chrome_rev=%s' % constants.CHROME_REV_LATEST,
+        '--chrome_rev=%s' % constants.CHROME_REV_LOCAL,
+        '--chrome_root=.',
+        '--chrome_rev=%s' % constants.CHROME_REV_TOT,
+        '--chrome_rev=%s' % constants.CHROME_REV_LOCAL,
+        self._X86_PREFLIGHT,
+    ]
+    options, args = cbuildbot._ParseCommandLine(self.parser, args)
+    self.assertEquals(options.chrome_rev, constants.CHROME_REV_LOCAL)
+    self.assertNotEquals(options.chrome_root, None)
+
+  def testPassThroughOptions(self):
+    """Test we are building up pass-through list properly."""
+    args = ['--remote', '-g', '1234', self._X86_PREFLIGHT]
+    options, args = cbuildbot._ParseCommandLine(self.parser, args)
+
+    self.assertEquals(options.pass_through_args, ['-g', '1234'])
+
+  def testDebugPassThrough(self):
+    """Test we are passing --debug through."""
+    args = ['--remote', '--debug', '--buildbot', self._X86_PREFLIGHT]
+    options, args = cbuildbot._ParseCommandLine(self.parser, args)
+    self.assertEquals(options.pass_through_args, ['--debug', '--buildbot'])
+
+  def testCreateBranch(self):
+    """Test a normal create branch run."""
+    args = ['--branch-name', 'refs/heads/test', constants.BRANCH_UTIL_CONFIG]
+    self.assertDieSysExit(cbuildbot._ParseCommandLine, self.parser, args)
+
+  def testCreateBranchNoVersion(self):
+    """Test we require --version with branch-util."""
+    with cros_test_lib.LoggingCapturer() as logger:
+      args = [constants.BRANCH_UTIL_CONFIG]
+      self.assertDieSysExit(cbuildbot._ParseCommandLine, self.parser, args)
+      self.AssertLogsContain(logger, '--branch-name')
+
+  def testCreateBranchDelete(self):
+    """Test we don't require --version with --delete."""
+    args = ['--delete-branch', '--branch-name', 'refs/heads/test',
+            constants.BRANCH_UTIL_CONFIG]
+    cbuildbot._ParseCommandLine(self.parser, args)
+
+  def testBranchOptionsWithoutBranchConfig(self):
+    """Error out when branch options passed in without branch-util config."""
+    for extra_args in [['--delete-branch'],
+                       ['--branch-name', 'refs/heads/test'],
+                       ['--rename-to', 'abc']]:
+      with cros_test_lib.LoggingCapturer() as logger:
+        args = [self._X86_PREFLIGHT] + extra_args
+        self.assertDieSysExit(cbuildbot._ParseCommandLine, self.parser, args)
+        self.AssertLogsContain(logger, 'Cannot specify')
+
+
+class FullInterfaceTest(cros_test_lib.MockTempDirTestCase):
+  """Tests that run the cbuildbot.main() function directly.
+
+  Note this explicitly suppresses automatic VerifyAll() calls; thus if you want
+  that checked, you have to invoke it yourself.
+  """
+
+  def MakeTestRootDir(self, relpath):
+    abspath = os.path.join(self.root, relpath)
+    osutils.SafeMakedirs(abspath)
+    return abspath
+
+  def setUp(self):
+    self.root = self.tempdir
+    self.buildroot = self.MakeTestRootDir('build_root')
+    self.sourceroot = self.MakeTestRootDir('source_root')
+    self.trybot_root = self.MakeTestRootDir('trybot')
+    self.trybot_internal_root = self.MakeTestRootDir('trybot-internal')
+    self.external_marker = os.path.join(self.trybot_root, '.trybot')
+    self.internal_marker = os.path.join(self.trybot_internal_root, '.trybot')
+
+    osutils.SafeMakedirs(os.path.join(self.sourceroot, '.repo', 'manifests'))
+    osutils.SafeMakedirs(os.path.join(self.sourceroot, '.repo', 'repo'))
+
+    # Stub out all relevant methods regardless of whether they are called in the
+    # specific test case.
+    self.PatchObject(optparse.OptionParser, 'error',
+                     side_effect=TestArgsparseError())
+    self.PatchObject(argparse.ArgumentParser, 'error',
+                     side_effect=TestArgsparseError())
+    self.inchroot_mock = self.PatchObject(cros_build_lib, 'IsInsideChroot',
+                                          return_value=False)
+    self.input_mock = self.PatchObject(cros_build_lib, 'GetInput',
+                                       side_effect=Exception())
+    self.PatchObject(cbuildbot, '_RunBuildStagesWrapper', return_value=True)
+
+  def assertMain(self, args, common_options=True):
+    if common_options:
+      # Suppress cgroups code.  For cbuildbot invocation, it doesn't hugely
+      # care about cgroups- that's a blackbox to it.  As such these unittests
+      # should not be sensitive to it.
+      args.extend(['--sourceroot', self.sourceroot, '--nocgroups',
+                   '--notee'])
+    return cbuildbot.main(args)
+
+  def testNullArgsStripped(self):
+    """Test that null args are stripped out and don't cause error."""
+    self.assertMain(['--local', '-r', self.buildroot, '', '',
+                     'x86-generic-paladin'])
+
+  def testMultipleConfigsError(self):
+    """Test that multiple configs cause error if --remote is not used."""
+    self.assertRaises(cros_build_lib.DieSystemExit, self.assertMain,
+                      ['--local',
+                       '-r', self.buildroot,
+                       'arm-generic-paladin',
+                       'x86-generic-paladin'])
+
+  def testDontInferBuildrootForBuildBotRuns(self):
+    """Test that we don't infer buildroot if run with --buildbot option."""
+    self.assertRaises(TestArgsparseError, self.assertMain,
+                      ['--buildbot', 'x86-generic-paladin'])
+
+  def testInferExternalBuildRoot(self):
+    """Test that we default to correct buildroot for external config."""
+    self.PatchObject(cbuildbot, '_ConfirmBuildRoot',
+                     side_effect=TestHaltedException())
+    self.assertRaises(TestHaltedException, self.assertMain,
+                      ['--local', 'x86-generic-paladin'])
+
+  def testInferInternalBuildRoot(self):
+    """Test that we default to correct buildroot for internal config."""
+    self.PatchObject(cbuildbot, '_ConfirmBuildRoot',
+                     side_effect=TestHaltedException())
+    self.assertRaises(TestHaltedException, self.assertMain,
+                      ['--local', 'x86-mario-paladin'])
+
+  def testInferBuildRootPromptNo(self):
+    """Test that a 'no' answer on the prompt halts execution."""
+    self.input_mock.side_effect = None
+    self.input_mock.return_value = 'no'
+    self.assertRaises(SystemExit, self.assertMain,
+                      ['--local', 'x86-generic-paladin'])
+
+  def testInferBuildRootExists(self):
+    """Test that we don't prompt the user if buildroot already exists."""
+    osutils.Touch(self.external_marker)
+    os.utime(self.external_marker, None)
+    self.assertMain(['--local', 'x86-generic-paladin'])
+
+  def testBuildbotDiesInChroot(self):
+    """Buildbot should quit if run inside a chroot."""
+    self.inchroot_mock.return_value = True
+    self.assertRaises(cros_build_lib.DieSystemExit, self.assertMain,
+                      ['--local', '-r', self.buildroot, 'x86-generic-paladin'])
diff --git a/cbuildbot/chromeos_config.py b/cbuildbot/chromeos_config.py
new file mode 100644
index 0000000..77131d0
--- /dev/null
+++ b/cbuildbot/chromeos_config.py
@@ -0,0 +1,2894 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Configuration options for various cbuildbot builders."""
+
+from __future__ import print_function
+
+import copy
+
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import constants
+from chromite.lib import factory
+
+
+# Set to 'True' if this is a release branch. This updates the '-release' builder
+# configuration to the shape used by the release waterfall.
+IS_RELEASE_BRANCH = False
+
+
+def OverrideConfigForTrybot(build_config, options):
+  """Apply trybot-specific configuration settings.
+
+  Args:
+    build_config: The build configuration dictionary to override.
+      The dictionary is not modified.
+    options: The options passed on the commandline.
+
+  Returns:
+    A build configuration dictionary with the overrides applied.
+  """
+  copy_config = copy.deepcopy(build_config)
+  for my_config in [copy_config] + copy_config['child_configs']:
+    # Force uprev. This is so patched in changes are always
+    # built.
+    my_config['uprev'] = True
+    if my_config['internal']:
+      my_config['overlays'] = constants.BOTH_OVERLAYS
+
+    # Use the local manifest which only requires elevated access if it's really
+    # needed to build.
+    if not options.remote_trybot:
+      my_config['manifest'] = my_config['dev_manifest']
+
+    my_config['push_image'] = False
+
+    if my_config['build_type'] != constants.PAYLOADS_TYPE:
+      my_config['paygen'] = False
+
+    if options.hwtest and my_config['hw_tests_override'] is not None:
+      my_config['hw_tests'] = my_config['hw_tests_override']
+
+    # Default to starting with a fresh chroot on remote trybot runs.
+    if options.remote_trybot:
+      my_config['chroot_replace'] = True
+
+    # In trybots, we want to always run VM tests and all unit tests, so that
+    # developers will get better testing for their changes.
+    if my_config['vm_tests_override'] is not None:
+      my_config['vm_tests'] = my_config['vm_tests_override']
+
+  return copy_config
+
+
+def GetDefaultWaterfall(build_config):
+  if not (build_config['important'] or build_config['master']):
+    return None
+  if build_config['branch']:
+    return None
+  b_type = build_config['build_type']
+
+  if config_lib.IsCanaryType(b_type):
+    # If this is a canary build, it may fall on different waterfalls:
+    # - If we're building for a release branch, it belongs on a release
+    #   waterfall.
+    # - Otherwise, it belongs on the internal waterfall.
+    if IS_RELEASE_BRANCH:
+      return constants.WATERFALL_RELEASE
+    else:
+      return constants.WATERFALL_INTERNAL
+  elif config_lib.IsCQType(b_type):
+    # A Paladin can appear on the public or internal waterfall depending on its
+    # 'internal' status.
+    return (constants.WATERFALL_INTERNAL if build_config['internal'] else
+            constants.WATERFALL_EXTERNAL)
+  elif config_lib.IsPFQType(b_type) or b_type == constants.PRE_CQ_LAUNCHER_TYPE:
+    # These builder types belong on the internal waterfall.
+    return constants.WATERFALL_INTERNAL
+  else:
+    # No default active waterfall.
+    return None
+
+
+class HWTestList(object):
+  """Container for methods to generate HWTest lists."""
+
+  @classmethod
+  def DefaultList(cls, **kwargs):
+    """Returns a default list of HWTestConfig's for a build
+
+    Args:
+      **kwargs: overrides for the configs
+    """
+    # Number of tests running in parallel in the AU suite.
+    AU_TESTS_NUM = 2
+    # Number of tests running in parallel in the asynchronous canary
+    # test suite
+    ASYNC_TEST_NUM = 2
+
+    # Set the number of machines for the au and qav suites. If we are
+    # constrained in the number of duts in the lab, only give 1 dut to each.
+    if (kwargs.get('num', constants.HWTEST_DEFAULT_NUM) >=
+        constants.HWTEST_DEFAULT_NUM):
+      au_dict = dict(num=AU_TESTS_NUM)
+      async_dict = dict(num=ASYNC_TEST_NUM)
+    else:
+      au_dict = dict(num=1)
+      async_dict = dict(num=1)
+
+    au_kwargs = kwargs.copy()
+    au_kwargs.update(au_dict)
+
+    async_kwargs = kwargs.copy()
+    async_kwargs.update(async_dict)
+    async_kwargs['priority'] = constants.HWTEST_POST_BUILD_PRIORITY
+    async_kwargs['retry'] = False
+    async_kwargs['max_retries'] = None
+    async_kwargs['async'] = True
+    async_kwargs['suite_min_duts'] = 1
+
+    # BVT + AU suite.
+    return [config_lib.HWTestConfig(constants.HWTEST_BVT_SUITE,
+                                    blocking=True, **kwargs),
+            config_lib.HWTestConfig(constants.HWTEST_AU_SUITE,
+                                    blocking=True, **au_kwargs),
+            config_lib.HWTestConfig(constants.HWTEST_COMMIT_SUITE,
+                                    **async_kwargs),
+            config_lib.HWTestConfig(constants.HWTEST_CANARY_SUITE,
+                                    **async_kwargs)]
+
+  @classmethod
+  def DefaultListCanary(cls, **kwargs):
+    """Returns a default list of config_lib.HWTestConfig's for a canary build.
+
+    Args:
+      **kwargs: overrides for the configs
+    """
+    # Set minimum_duts default to 4, which means that lab will check the
+    # number of available duts to meet the minimum requirement before creating
+    # the suite job for canary builds.
+    kwargs.setdefault('minimum_duts', 4)
+    kwargs.setdefault('file_bugs', True)
+    return HWTestList.DefaultList(**kwargs)
+
+  @classmethod
+  def AFDOList(cls, **kwargs):
+    """Returns a default list of HWTestConfig's for a AFDO build.
+
+    Args:
+      **kwargs: overrides for the configs
+    """
+    afdo_dict = dict(pool=constants.HWTEST_SUITES_POOL,
+                     timeout=120 * 60, num=1, async=True, retry=False,
+                     max_retries=None)
+    afdo_dict.update(kwargs)
+    return [config_lib.HWTestConfig('perf_v2', **afdo_dict)]
+
+  @classmethod
+  def DefaultListNonCanary(cls, **kwargs):
+    """Return a default list of HWTestConfig's for a non-canary build.
+
+    Optional arguments may be overridden in `kwargs`, except that
+    the `blocking` setting cannot be provided.
+    """
+    return [config_lib.HWTestConfig(constants.HWTEST_BVT_SUITE, **kwargs),
+            config_lib.HWTestConfig(constants.HWTEST_COMMIT_SUITE, **kwargs)]
+
+  @classmethod
+  def DefaultListCQ(cls, **kwargs):
+    """Return a default list of HWTestConfig's for a CQ build.
+
+    Optional arguments may be overridden in `kwargs`, except that
+    the `blocking` setting cannot be provided.
+    """
+    default_dict = dict(pool=constants.HWTEST_PALADIN_POOL, timeout=120 * 60,
+                        file_bugs=False, priority=constants.HWTEST_CQ_PRIORITY,
+                        minimum_duts=4, offload_failures_only=True)
+    # Allows kwargs overrides to default_dict for cq.
+    default_dict.update(kwargs)
+    return HWTestList.DefaultListNonCanary(**default_dict)
+
+  @classmethod
+  def DefaultListPFQ(cls, **kwargs):
+    """Return a default list of HWTestConfig's for a PFQ build.
+
+    Optional arguments may be overridden in `kwargs`, except that
+    the `blocking` setting cannot be provided.
+    """
+    default_dict = dict(pool=constants.HWTEST_PFQ_POOL, file_bugs=True,
+                        priority=constants.HWTEST_PFQ_PRIORITY,
+                        retry=False, max_retries=None, minimum_duts=4)
+    # Allows kwargs overrides to default_dict for pfq.
+    default_dict.update(kwargs)
+    return HWTestList.DefaultListNonCanary(**default_dict)
+
+  @classmethod
+  def SharedPoolPFQ(cls, **kwargs):
+    """Return a list of HWTestConfigs for PFQ which uses a shared pool.
+
+    The returned suites will run in pool:critical by default, which is
+    shared with other types of builders (canaries, cq). The first suite in the
+    list is a blocking sanity suite that verifies the build will not break dut.
+    """
+    sanity_dict = dict(pool=constants.HWTEST_MACH_POOL,
+                       file_bugs=True, priority=constants.HWTEST_PFQ_PRIORITY,
+                       retry=False, max_retries=None)
+    sanity_dict.update(kwargs)
+    sanity_dict.update(dict(num=1, minimum_duts=1, suite_min_duts=1,
+                            blocking=True))
+    default_dict = dict(pool=constants.HWTEST_MACH_POOL,
+                        suite_min_duts=3)
+    default_dict.update(kwargs)
+    suite_list = [config_lib.HWTestConfig(constants.HWTEST_SANITY_SUITE,
+                                          **sanity_dict)]
+    suite_list.extend(HWTestList.DefaultListPFQ(**default_dict))
+    return suite_list
+
+  @classmethod
+  def SharedPoolCQ(cls, **kwargs):
+    """Return a list of HWTestConfigs for CQ which uses a shared pool.
+
+    The returned suites will run in pool:critical by default, which is
+    shared with other types of builder (canaries, pfq). The first suite in the
+    list is a blocking sanity suite that verifies the build will not break dut.
+    """
+    sanity_dict = dict(pool=constants.HWTEST_MACH_POOL, timeout=120 * 60,
+                       file_bugs=False, priority=constants.HWTEST_CQ_PRIORITY)
+    sanity_dict.update(kwargs)
+    sanity_dict.update(dict(num=1, minimum_duts=1, suite_min_duts=1,
+                            blocking=True))
+    default_dict = dict(pool=constants.HWTEST_MACH_POOL,
+                        suite_min_duts=10)
+    default_dict.update(kwargs)
+    suite_list = [config_lib.HWTestConfig(constants.HWTEST_SANITY_SUITE,
+                                          **sanity_dict)]
+    suite_list.extend(HWTestList.DefaultListCQ(**default_dict))
+    return suite_list
+
+  @classmethod
+  def SharedPoolCanary(cls, **kwargs):
+    """Return a list of HWTestConfigs for Canary which uses a shared pool.
+
+    The returned suites will run in pool:critical by default, which is
+    shared with CQs. The first suite in the list is a blocking sanity suite
+    that verifies the build will not break dut.
+    """
+    sanity_dict = dict(pool=constants.HWTEST_MACH_POOL, file_bugs=True)
+    sanity_dict.update(kwargs)
+    sanity_dict.update(dict(num=1, minimum_duts=1, suite_min_duts=1,
+                            blocking=True))
+    default_dict = dict(pool=constants.HWTEST_MACH_POOL,
+                        suite_min_duts=6)
+    default_dict.update(kwargs)
+    suite_list = [config_lib.HWTestConfig(constants.HWTEST_SANITY_SUITE,
+                                          **sanity_dict)]
+    suite_list.extend(HWTestList.DefaultListCanary(**default_dict))
+    return suite_list
+
+  @classmethod
+  def AFDORecordTest(cls, **kwargs):
+    default_dict = dict(pool=constants.HWTEST_MACH_POOL,
+                        warn_only=True, num=1, file_bugs=True,
+                        timeout=constants.AFDO_GENERATE_TIMEOUT,
+                        priority=constants.HWTEST_PFQ_PRIORITY)
+    # Allows kwargs overrides to default_dict for cq.
+    default_dict.update(kwargs)
+    return config_lib.HWTestConfig(constants.HWTEST_AFDO_SUITE, **default_dict)
+
+  @classmethod
+  def WiFiCellPoolPreCQ(cls, **kwargs):
+    """Return a list of HWTestConfigs which run wifi tests.
+
+    This should be used by the ChromeOS WiFi team to ensure changes pass the
+    wifi tests as a pre-cq sanity check.
+
+    Args:
+      **kwargs: Overrides for the default suite settings (applied on top of
+        the wificell pre-cq defaults below).
+
+    Returns:
+      A single-element list containing the config_lib.HWTestConfig for
+      constants.WIFICELL_PRE_CQ.
+    """
+    default_dict = dict(pool=constants.HWTEST_WIFICELL_PRE_CQ_POOL,
+                        blocking=True, file_bugs=False,
+                        priority=constants.HWTEST_DEFAULT_PRIORITY,
+                        retry=False, max_retries=None, minimum_duts=1)
+    default_dict.update(kwargs)
+    suite_list = [config_lib.HWTestConfig(constants.WIFICELL_PRE_CQ,
+                                          **default_dict)]
+    return suite_list
+
+def append_useflags(useflags):
+  """Used to append a set of useflags to existing useflags.
+
+  Useflags that shadow prior use flags will cause the prior flag to be removed.
+  (e.g. appending '-foo' to 'foo' will cause 'foo' to be removed)
+
+  Usage:
+    new_config = base_config.derive(useflags=append_useflags(['foo', '-bar']))
+
+  Args:
+    useflags: List of string useflags to append.
+
+  Returns:
+    A callable that takes the existing useflag list (or None) and returns
+    the merged useflags, sorted, with shadowed flags removed.
+  """
+  assert isinstance(useflags, (list, set))
+  # For each appended flag, compute the flag it shadows: 'foo' shadows
+  # '-foo', and '-foo' shadows 'foo'.
+  shadowed_useflags = {'-' + flag for flag in useflags
+                       if not flag.startswith('-')}
+  shadowed_useflags.update({flag[1:] for flag in useflags
+                            if flag.startswith('-')})
+  def handler(old_useflags):
+    new_useflags = set(old_useflags or [])
+    new_useflags.update(useflags)
+    new_useflags.difference_update(shadowed_useflags)
+    return sorted(list(new_useflags))
+
+  return handler
+
+
+# VM test types available to traditional builders; used as the default
+# vm_tests_override value on the pfq and paladin templates below.
+TRADITIONAL_VM_TESTS_SUPPORTED = [constants.SMOKE_SUITE_TEST_TYPE,
+                                  constants.SIMPLE_AU_TEST_TYPE,
+                                  constants.CROS_VM_TEST_TYPE]
+
+#
+# Define assorted constants describing various sets of boards.
+#
+
+# Base per-board configuration.
+# Every board must appear in exactly 1 of the following sets.
+
+# ARM boards released through the internal manifest only.
+_arm_internal_release_boards = frozenset([
+    'arkham',
+    'beaglebone',
+    'beaglebone_servo',
+    'daisy',
+    'daisy_skate',
+    'daisy_spring',
+    'daisy_winter',
+    'kayle',
+    'nyan',
+    'nyan_big',
+    'nyan_blaze',
+    'nyan_freon',
+    'nyan_kitty',
+    'oak',
+    'peach_pi',
+    'peach_pit',
+    'purin',
+    'smaug',
+    'storm',
+    'rush',
+    'rush_ryu',
+    'veyron_brain',
+    'veyron_danger',
+    'veyron_gus',
+    'veyron_jaq',
+    'veyron_jerry',
+    'veyron_mickey',
+    'veyron_mighty',
+    'veyron_minnie',
+    'veyron_pinky',
+    'veyron_rialto',
+    'veyron_romy',
+    'veyron_shark',
+    'veyron_speedy',
+    'veyron_thea',
+    'whirlwind',
+])
+
+# ARM boards buildable from the public (external) manifest.
+_arm_external_boards = frozenset([
+    'arm-generic',
+    'arm-generic_freon',
+    'arm64-generic',
+])
+
+# x86/amd64 boards released through the internal manifest only.
+_x86_internal_release_boards = frozenset([
+    'auron',
+    'auron_paine',
+    'auron_yuna',
+    'bayleybay',
+    'banjo',
+    'beltino',
+    'bobcat',
+    'buddy',
+    'butterfly',
+    'candy',
+    'celes',
+    'cid',
+    'clapper',
+    'cranky',
+    'cyan',
+    'enguarde',
+    'expresso',
+    'falco',
+    'falco_li',
+    'gandof',
+    'glados',
+    'glimmer',
+    'gnawty',
+    'guado',
+    'guado_moblab',
+    'heli',
+    'jecht',
+    'kip',
+    'kunimitsu',
+    'lakitu',
+    'lakitu_mobbuild',
+    'leon',
+    'link',
+    'lulu',
+    'lumpy',
+    'mccloud',
+    'monroe',
+    'ninja',
+    'orco',
+    'panther',
+    'panther_embedded',
+    'panther_moblab',
+    'parrot',
+    'parrot_ivb',
+    'parry',
+    'peppy',
+    'quawks',
+    'rambi',
+    'rikku',
+    'samus',
+    'slippy',
+    'squawks',
+    'stout',
+    'strago',
+    'stumpy',
+    'stumpy_moblab',
+    'sumo',
+    'swanky',
+    'tidus',
+    'tricky',
+    'ultima',
+    'winky',
+    'wizpig',
+    'wolf',
+    'x86-alex',
+    'x86-alex_he',
+    'x86-mario',
+    'x86-zgb',
+    'x86-zgb_he',
+    'zako',
+])
+
+# x86/amd64 boards buildable from the public (external) manifest.
+_x86_external_boards = frozenset([
+    'amd64-generic',
+    'amd64-generic_freon',
+    'gizmo',
+    'x32-generic',
+    'x86-generic',
+    'x86-generic_freon',
+])
+
+# Every board should be in only 1 of the above sets.
+_distinct_board_sets = [
+    _arm_internal_release_boards,
+    _arm_external_boards,
+    _x86_internal_release_boards,
+    _x86_external_boards,
+]
+
+# Per-architecture board sets for "full" builders (internal + external).
+_arm_full_boards = (_arm_internal_release_boards |
+                    _arm_external_boards)
+_x86_full_boards = (_x86_internal_release_boards |
+                    _x86_external_boards)
+
+# Currently every board gets a full builder, so these are aliases.
+_arm_boards = _arm_full_boards
+_x86_boards = _x86_full_boards
+
+# Boards with release (canary) builders: internal boards only.
+_all_release_boards = (
+    _arm_internal_release_boards |
+    _x86_internal_release_boards
+)
+_all_full_boards = (
+    _arm_full_boards |
+    _x86_full_boards
+)
+_all_boards = (
+    _x86_boards |
+    _arm_boards
+)
+
+_arm_release_boards = _arm_internal_release_boards
+_x86_release_boards = _x86_internal_release_boards
+
+# Boards that build against the internal manifest (with Chrome branding).
+_internal_boards = _all_release_boards
+
+# Board can appear in 1 or more of the following sets.
+
+# Brillo boards: get the 'brillo' config mixin (no Chrome sync/SDK, no VM or
+# HW tests by default).
+_brillo_boards = frozenset([
+    'arkham',
+    'gizmo',
+    'kayle',
+    'lakitu',
+    'lakitu_mobbuild',
+    'panther_embedded',
+    'purin',
+    'storm',
+    'whirlwind',
+])
+
+# Boards that get the 'moblab' config mixin (no image test, no VM tests).
+_moblab_boards = frozenset([
+    'stumpy_moblab',
+    'panther_moblab',
+    'guado_moblab',
+])
+
+# Boards built with profile='minimal'.
+_minimal_profile_boards = frozenset([
+    'bobcat',
+])
+
+# Boards that skip factory, factory_toolkit and factory_install_netboot.
+_nofactory_boards = frozenset([
+    'daisy_winter',
+    'smaug',
+])
+
+# Boards that build their toolchain from source (usepkg_toolchain=False).
+_toolchains_from_source = frozenset([
+    'x32-generic',
+])
+
+# Boards with image_test disabled.
+_noimagetest_boards = frozenset([
+    'lakitu',
+    'lakitu_mobbuild',
+])
+
+# Boards with hwqual disabled.
+_nohwqual_boards = frozenset([
+    'kayle',
+    'lakitu',
+    'lakitu_mobbuild',
+])
+
+# Boards with rootfs_verification disabled.
+_norootfs_verification_boards = frozenset([
+])
+
+# Boards that use disk_layout='base'.
+_base_layout_boards = frozenset([
+    'lakitu',
+    'lakitu_mobbuild',
+])
+
+# Boards that do not run unit tests (get the no_unittest_builder mixin).
+_no_unittest_boards = frozenset((
+))
+
+# Boards whose images are also uploaded as GCE images.
+_upload_gce_images_boards = frozenset([
+    'lakitu',
+    'lakitu_mobbuild',
+])
+
+# Boards that cannot run VM tests (all ARM and Brillo boards).
+_no_vmtest_boards = _arm_boards | _brillo_boards
+
+
+# This is a list of configs that should be included on the main waterfall, but
+# aren't included by default (see IsDefaultMainWaterfall). This loosely
+# corresponds to the set of experimental or self-standing configs.
+# Maps waterfall name (constants.WATERFALL_*) -> frozenset of config names
+# forced onto that waterfall.
+_waterfall_config_map = {
+    constants.WATERFALL_EXTERNAL: frozenset([
+        # Experimental Paladins
+        'amd64-generic_freon-paladin',
+
+        # Incremental
+        'amd64-generic-incremental',
+        'daisy-incremental',
+        'x86-generic-incremental',
+
+        # Full
+        'amd64-generic-full',
+        'arm-generic-full',
+        'daisy-full',
+        'oak-full',
+        'x86-generic-full',
+
+        # ASAN
+        'amd64-generic-asan',
+        'x86-generic-asan',
+
+        # Utility
+        'chromiumos-sdk',
+        'refresh-packages',
+
+        # LLVM
+        'amd64-generic-llvm',
+    ]),
+
+    constants.WATERFALL_INTERNAL: frozenset([
+        # Experimental Paladins.
+        'panther_moblab-paladin',
+        'stumpy_moblab-paladin',
+
+        # Experimental Canaries (Group)
+        'storm-release-group',
+        'strago-release-group',
+        'veyron-c-release-group',
+
+        # Experimental Canaries
+        'bobcat-release',
+        'daisy_winter-release',
+        'kayle-release',
+        'nyan_freon-release',
+        'panther_moblab-release',
+        'rush_ryu-release',
+        'smaug-release',
+        'guado_moblab-release',
+
+        # Incremental Builders.
+        'mario-incremental',
+        'lakitu-incremental',
+
+        # Firmware Builders.
+        'link-depthcharge-full-firmware',
+
+        # Toolchain Builders.
+        'internal-toolchain-major',
+        'internal-toolchain-minor',
+    ]),
+
+    constants.WATERFALL_RELEASE: frozenset([
+    ]),
+}
+
+
+@factory.CachedFunctionCall
+def GetConfig():
+  # Chrome OS site parameters.
+  site_params = config_lib.DefaultSiteParameters()
+
+  # Helpers for constructing Chrome OS site parameters.
+  manifest_project = 'chromiumos/manifest'
+  manifest_int_project = 'chromeos/manifest-internal'
+  external_remote = 'cros'
+  internal_remote = 'cros-internal'
+  kayle_internal_remote = 'kayle-cros-internal'
+  chromium_remote = 'chromium'
+  chrome_remote = 'chrome'
+  aosp_remote = 'aosp'
+  weave_remote = 'weave'
+
+  # Gerrit instance site parameters.
+  site_params.update(
+      config_lib.GerritInstanceParameters('EXTERNAL', 'chromium'))
+  site_params.update(
+      config_lib.GerritInstanceParameters('INTERNAL', 'chrome-internal'))
+  site_params.update(
+      config_lib.GerritInstanceParameters('AOSP', 'android'))
+  site_params.update(
+      config_lib.GerritInstanceParameters('WEAVE', 'weave'))
+
+  site_params.update(
+      # Parameters to define which manifests to use.
+      MANIFEST_PROJECT=manifest_project,
+      MANIFEST_INT_PROJECT=manifest_int_project,
+      MANIFEST_PROJECTS=(manifest_project, manifest_int_project),
+      MANIFEST_URL='%s/%s' % (
+          site_params['EXTERNAL_GOB_URL'], manifest_project
+      ),
+      MANIFEST_INT_URL='%s/%s' % (
+          site_params['INTERNAL_GERRIT_URL'], manifest_int_project
+      ),
+
+      # CrOS remotes specified in the manifests.
+      EXTERNAL_REMOTE=external_remote,
+      INTERNAL_REMOTE=internal_remote,
+      GOB_REMOTES={
+          site_params['EXTERNAL_GOB_INSTANCE']: external_remote,
+          site_params['INTERNAL_GOB_INSTANCE']: internal_remote
+      },
+      KAYLE_INTERNAL_REMOTE=kayle_internal_remote,
+      CHROMIUM_REMOTE=chromium_remote,
+      CHROME_REMOTE=chrome_remote,
+      AOSP_REMOTE=aosp_remote,
+      WEAVE_REMOTE=weave_remote,
+
+      # Only remotes listed in CROS_REMOTES are considered branchable.
+      # CROS_REMOTES and BRANCHABLE_PROJECTS must be kept in sync.
+      GERRIT_HOSTS={
+          external_remote: site_params['EXTERNAL_GERRIT_HOST'],
+          internal_remote: site_params['INTERNAL_GERRIT_HOST'],
+          aosp_remote: site_params['AOSP_GERRIT_HOST'],
+          weave_remote: site_params['WEAVE_GERRIT_HOST']
+      },
+      CROS_REMOTES={
+          external_remote: site_params['EXTERNAL_GOB_URL'],
+          internal_remote: site_params['INTERNAL_GOB_URL'],
+          kayle_internal_remote: site_params['INTERNAL_GOB_URL'],
+          aosp_remote: site_params['AOSP_GOB_URL'],
+          weave_remote: site_params['WEAVE_GOB_URL']
+      },
+      GIT_REMOTES={
+          chromium_remote: site_params['EXTERNAL_GOB_URL'],
+          chrome_remote: site_params['INTERNAL_GOB_URL'],
+          external_remote: site_params['EXTERNAL_GOB_URL'],
+          internal_remote: site_params['INTERNAL_GOB_URL'],
+          kayle_internal_remote: site_params['INTERNAL_GOB_URL'],
+          aosp_remote: site_params['AOSP_GOB_URL'],
+          weave_remote: site_params['WEAVE_GOB_URL']
+      },
+
+      # Prefix to distinguish internal and external changes. This is used
+      # when a user specifies a patch with "-g", when generating a key for
+      # a patch to use in our PatchCache, and when displaying a custom
+      # string for the patch.
+      CHANGE_PREFIX={
+          external_remote: site_params['EXTERNAL_CHANGE_PREFIX'],
+          internal_remote: site_params['INTERNAL_CHANGE_PREFIX'],
+      },
+
+      # List of remotes that are okay to include in the external manifest.
+      EXTERNAL_REMOTES=(
+          external_remote, chromium_remote
+      ),
+
+      # Mapping 'remote name' -> regexp that matches names of repositories on
+      # that remote that can be branched when creating CrOS branch.
+      # Branching script will actually create a new git ref when branching
+      # these projects. It won't attempt to create a git ref for other projects
+      # that may be mentioned in a manifest. If a remote is missing from this
+      # dictionary, all projects on that remote are considered to not be
+      # branchable.
+      BRANCHABLE_PROJECTS={
+          external_remote: r'chromiumos/(.+)',
+          internal_remote: r'chromeos/(.+)',
+          kayle_internal_remote: r'chromeos/(.+)'
+      },
+
+      # Additional parameters used to filter manifests, create modified
+      # manifests, and to branch manifests.
+      MANIFEST_VERSIONS_GOB_URL=(
+          '%s/chromiumos/manifest-versions' % site_params['EXTERNAL_GOB_URL']
+      ),
+      MANIFEST_VERSIONS_INT_GOB_URL=(
+          '%s/chromeos/manifest-versions' % site_params['INTERNAL_GOB_URL']
+      ),
+      MANIFEST_VERSIONS_GOB_URL_TEST=(
+          '%s/chromiumos/manifest-versions-test' % (
+              site_params['EXTERNAL_GOB_URL']
+          )
+      ),
+      MANIFEST_VERSIONS_INT_GOB_URL_TEST=(
+          '%s/chromeos/manifest-versions-test' % site_params['INTERNAL_GOB_URL']
+      ),
+      MANIFEST_VERSIONS_GS_URL='gs://chromeos-manifest-versions',
+
+      # Standard directories under buildroot for cloning these repos.
+      EXTERNAL_MANIFEST_VERSIONS_PATH='manifest-versions',
+      INTERNAL_MANIFEST_VERSIONS_PATH='manifest-versions-internal',
+
+      # URL of the repo project.
+      REPO_URL='%s/external/repo' % site_params['EXTERNAL_GOB_URL']
+  )
+
+  # Site specific adjustments for default BuildConfig values.
+  defaults = config_lib.DefaultSettings()
+
+  # Git repository URL for our manifests.
+  #  https://chromium.googlesource.com/chromiumos/manifest
+  #  https://chrome-internal.googlesource.com/chromeos/manifest-internal
+  defaults['manifest_repo_url'] = site_params['MANIFEST_URL']
+
+  # Site configuration.
+  site_config = config_lib.SiteConfig(defaults=defaults,
+                                      site_params=site_params)
+
+  default_hw_tests_override = config_lib.BuildConfig(
+      hw_tests_override=HWTestList.DefaultList(
+          num=constants.HWTEST_TRYBOT_NUM, pool=constants.HWTEST_TRYBOT_POOL,
+          file_bugs=False),
+  )
+
+  # Arch-specific mixins.
+
+  # Config parameters for builders that do not run tests on the builder.
+  no_unittest_builder = config_lib.BuildConfig(
+      unittests=False,
+  )
+
+  no_vmtest_builder = config_lib.BuildConfig(
+      vm_tests=[],
+      vm_tests_override=None,
+  )
+
+  no_hwtest_builder = config_lib.BuildConfig(
+      hw_tests=[],
+      hw_tests_override=[],
+  )
+
+  # Builder-specific mixins
+
+  config_lib.BuildConfig(
+      # Full builds that build fully from binaries.
+      build_type=constants.BUILD_FROM_SOURCE_TYPE,
+      archive_build_debug=True,
+      images=['test', 'factory_install'],
+      git_sync=True,
+  )
+
+  full = site_config.AddTemplate(
+      'full',
+      default_hw_tests_override,
+      # Full builds are test builds to show that we can build from scratch,
+      # so use settings to build from scratch, and archive the results.
+      usepkg_build_packages=False,
+      chrome_sdk=True,
+
+      build_type=constants.BUILD_FROM_SOURCE_TYPE,
+      archive_build_debug=True,
+      images=['base', 'recovery', 'test', 'factory_install'],
+      git_sync=True,
+      trybot_list=True,
+      description='Full Builds',
+      image_test=True,
+      doc='http://www.chromium.org/chromium-os/build/builder-overview#'
+          'TOC-Continuous',
+  )
+
+  # Full builders with prebuilts.
+  full_prebuilts = full.derive(
+      prebuilts=constants.PUBLIC,
+  )
+
+  pfq = config_lib.BuildConfig(
+      build_type=constants.PFQ_TYPE,
+      important=True,
+      uprev=True,
+      overlays=constants.PUBLIC_OVERLAYS,
+      manifest_version=True,
+      trybot_list=True,
+      doc='http://www.chromium.org/chromium-os/build/builder-overview#'
+          'TOC-Chrome-PFQ',
+      vm_tests=[constants.SMOKE_SUITE_TEST_TYPE,
+                constants.SIMPLE_AU_TEST_TYPE],
+      vm_tests_override=TRADITIONAL_VM_TESTS_SUPPORTED,
+  )
+
+  paladin = site_config.AddTemplate(
+      'paladin',
+      default_hw_tests_override,
+      chroot_replace=False,
+      important=True,
+      build_type=constants.PALADIN_TYPE,
+      overlays=constants.PUBLIC_OVERLAYS,
+      prebuilts=constants.PUBLIC,
+      manifest_version=True,
+      trybot_list=True,
+      description='Commit Queue',
+      upload_standalone_images=False,
+      images=['base', 'test'],
+      image_test=True,
+      chrome_sdk=True,
+      chrome_sdk_build_chrome=False,
+      doc='http://www.chromium.org/chromium-os/build/builder-overview#TOC-CQ',
+
+      vm_tests=[constants.SMOKE_SUITE_TEST_TYPE],
+      vm_tests_override=TRADITIONAL_VM_TESTS_SUPPORTED,
+  )
+
+  # Incremental builders are intended to test the developer workflow.
+  # For that reason, they don't uprev.
+  incremental = site_config.AddTemplate(
+      'incremental',
+      default_hw_tests_override,
+      build_type=constants.INCREMENTAL_TYPE,
+      chroot_replace=False,
+      uprev=False,
+      overlays=constants.PUBLIC_OVERLAYS,
+      description='Incremental Builds',
+      doc='http://www.chromium.org/chromium-os/build/builder-overview#'
+          'TOC-Continuous',
+  )
+
+  # This builds with more source available.
+  internal = config_lib.BuildConfig(
+      internal=True,
+      overlays=constants.BOTH_OVERLAYS,
+      manifest_repo_url=site_params['MANIFEST_INT_URL'],
+  )
+
+  brillo = config_lib.BuildConfig(
+      sync_chrome=False,
+      chrome_sdk=False,
+      afdo_use=False,
+      dev_installer_prebuilts=False,
+      # TODO(gauravsh): crbug.com/356414 Start running tests on Brillo configs.
+      vm_tests=[],
+      hw_tests=[],
+  )
+
+  moblab = config_lib.BuildConfig(
+      image_test=False,
+      vm_tests=[],
+  )
+
+  beaglebone = brillo.derive(image_test=False, rootfs_verification=False)
+
+  # This adds Chrome branding.
+  official_chrome = config_lib.BuildConfig(
+      useflags=[constants.USE_CHROME_INTERNAL],
+  )
+
+  # This sets chromeos_official.
+  official = official_chrome.derive(
+      chromeos_official=True,
+  )
+
+  _cros_sdk = site_config.AddConfigWithoutTemplate(
+      'chromiumos-sdk',
+      full_prebuilts,
+      no_hwtest_builder,
+      # The amd64-host has to be last as that is when the toolchains
+      # are bundled up for inclusion in the sdk.
+      boards=[
+          'x86-generic', 'arm-generic', 'amd64-generic'
+      ],
+      build_type=constants.CHROOT_BUILDER_TYPE,
+      builder_class_name='sdk_builders.ChrootSdkBuilder',
+      use_sdk=False,
+      trybot_list=True,
+      description='Build the SDK and all the cross-compilers',
+      doc='http://www.chromium.org/chromium-os/build/builder-overview#'
+          'TOC-Continuous',
+  )
+
+  asan = site_config.AddTemplate(
+      'asan',
+      default_hw_tests_override,
+      profile='asan',
+      disk_layout='2gb-rootfs',
+      # TODO(deymo): ASan builders generate bigger files, in particular a bigger
+      # Chrome binary, that update_engine can't handle in delta payloads due to
+      # memory limits. Remove the following lines once crbug.com/329248 is
+      # fixed.
+      vm_tests=[constants.SMOKE_SUITE_TEST_TYPE],
+      vm_tests_override=None,
+      doc='http://www.chromium.org/chromium-os/build/builder-overview#'
+          'TOC-ASAN',
+  )
+
+  llvm = site_config.AddTemplate(
+      'llvm',
+      default_hw_tests_override,
+      profile='llvm',
+      description='Build with LLVM',
+  )
+
+  telemetry = site_config.AddTemplate(
+      'telemetry',
+      default_hw_tests_override,
+      build_type=constants.INCREMENTAL_TYPE,
+      uprev=False,
+      overlays=constants.PUBLIC_OVERLAYS,
+      vm_tests=[constants.TELEMETRY_SUITE_TEST_TYPE],
+      description='Telemetry Builds',
+  )
+
+  chromium_pfq = site_config.AddTemplate(
+      'chromium-pfq',
+      default_hw_tests_override,
+      build_type=constants.CHROME_PFQ_TYPE,
+      important=True,
+      uprev=False,
+      overlays=constants.PUBLIC_OVERLAYS,
+      manifest_version=True,
+      chrome_rev=constants.CHROME_REV_LATEST,
+      chrome_sdk=True,
+      description='Preflight Chromium Uprev & Build (public)',
+      vm_tests=[constants.SMOKE_SUITE_TEST_TYPE,
+                constants.SIMPLE_AU_TEST_TYPE],
+      vm_tests_override=None,
+  )
+
+  # TODO(davidjames): Convert this to an external config once the unified master
+  # logic is ready.
+  internal_chromium_pfq = internal.derive(
+      chromium_pfq,
+      description='Preflight Chromium Uprev & Build (internal)',
+      overlays=constants.BOTH_OVERLAYS,
+      prebuilts=constants.PUBLIC,
+      doc='http://www.chromium.org/chromium-os/build/builder-overview#'
+          'TOC-Chrome-PFQ',
+  )
+
+  site_config.AddConfig(
+      internal_chromium_pfq, 'master-chromium-pfq',
+      boards=[],
+      master=True,
+      binhost_test=True,
+      push_overlays=constants.BOTH_OVERLAYS,
+      afdo_update_ebuild=True,
+      chrome_sdk=False,
+      health_alert_recipients=['chromeos-infra-eng@grotations.appspotmail.com',
+                               'tree',
+                               'chrome'],
+  )
+
+  chrome_pfq = site_config.AddTemplate(
+      'chrome-pfq',
+      internal_chromium_pfq,
+      official,
+      important=True,
+      overlays=constants.BOTH_OVERLAYS,
+      description='Preflight Chrome Uprev & Build (internal)',
+      prebuilts=constants.PRIVATE,
+  )
+
+  chrome_try = config_lib.BuildConfig(
+      build_type=constants.CHROME_PFQ_TYPE,
+      chrome_rev=constants.CHROME_REV_TOT,
+      use_lkgm=True,
+      important=False,
+      manifest_version=False,
+  )
+
+  chromium_info = site_config.AddTemplate(
+      'chromium-pfq-informational',
+      chromium_pfq,
+      chrome_try,
+      chrome_sdk=False,
+      description='Informational Chromium Uprev & Build (public)',
+  )
+
+  chrome_info = site_config.AddTemplate(
+      'chrome-pfq-informational',
+      chromium_info,
+      internal, official,
+      description='Informational Chrome Uprev & Build (internal)',
+  )
+
+  chrome_perf = site_config.AddTemplate(
+      'chrome-perf',
+      chrome_info,
+      no_unittest_builder,
+      no_vmtest_builder,
+      description='Chrome Performance test bot',
+      hw_tests=[config_lib.HWTestConfig(
+          'perf_v2', pool=constants.HWTEST_CHROME_PERF_POOL,
+          timeout=90 * 60, critical=True, num=1)],
+      use_chrome_lkgm=True,
+      use_lkgm=False,
+      useflags=append_useflags(['-cros-debug']),
+  )
+
+
+  # A base config for each board.
+  _base_configs = dict()
+
+  def _CreateBaseConfigs():
+    for board in _all_boards:
+      base = config_lib.BuildConfig()
+
+      if board in _internal_boards:
+        base.update(internal)
+        base.update(official_chrome)
+        base.update(manifest=constants.OFFICIAL_MANIFEST)
+      if board in _brillo_boards:
+        base.update(brillo)
+      if board in _moblab_boards:
+        base.update(moblab)
+      if board in _minimal_profile_boards:
+        base.update(profile='minimal')
+      if board in _nofactory_boards:
+        base.update(factory=False)
+        base.update(factory_toolkit=False)
+        base.update(factory_install_netboot=False)
+      if board in _toolchains_from_source:
+        base.update(usepkg_toolchain=False)
+      if board in _noimagetest_boards:
+        base.update(image_test=False)
+      if board in _nohwqual_boards:
+        base.update(hwqual=False)
+      if board in _norootfs_verification_boards:
+        base.update(rootfs_verification=False)
+      if board in _base_layout_boards:
+        base.update(disk_layout='base')
+      if board in _no_unittest_boards:
+        base.update(no_unittest_builder)
+      if board in _no_vmtest_boards:
+        base.update(no_vmtest_builder)
+      if board in _upload_gce_images_boards:
+        base.update(upload_gce_images=True)
+
+      # TODO(akeshet) Eliminate or clean up this special case.
+      # kayle board has a lot of kayle-specific config changes.
+      if board == 'kayle':
+        base.update(manifest='kayle.xml',
+                    dev_manifest='kayle.xml',
+                    factory_toolkit=False,
+                    # TODO(namnguyen): Cannot build factory net install (no
+                    # usbnet).
+                    factory_install_netboot=False,
+                    # TODO(namngyuyen) Cannot build dev or test images due to
+                    # #436523.
+                    images=['base'])
+
+      board_config = base.derive(boards=[board])
+      # Note: base configs should not specify a useflag list. Convert any
+      # useflags that this base config has accrued (for instance,
+      # 'chrome_internal', via official_chrome) into an append_useflags
+      # callable. This is because the board base config is the last config to be
+      # derived from when creating a board-specific config,
+      if 'useflags' in board_config:
+        board_config['useflags'] = append_useflags(board_config['useflags'])
+      _base_configs[board] = board_config
+
+  _CreateBaseConfigs()
+
+  def _CreateConfigsForBoards(config_base, boards, name_suffix, **kwargs):
+    """Create configs based on |config_base| for all boards in |boards|.
+
+    Note: Existing configs will not be overwritten.
+
+    Args:
+      config_base: A BuildConfig instance to inherit from.
+      boards: A set of boards to create configs for.
+      name_suffix: A naming suffix. Configs will have names of the form
+                   board-name_suffix.
+      **kwargs: Additional keyword arguments to be used in AddConfig.
+    """
+    for board in boards:
+      config_name = '%s-%s' % (board, name_suffix)
+      if config_name not in site_config:
+        base = config_lib.BuildConfig()
+        config = site_config.AddConfig(config_base, config_name, base,
+                                       _base_configs[board], **kwargs)
+        if board in _nofactory_boards:
+          try:
+            config.get('images', []).remove('factory_install')
+          except ValueError:
+            pass
+
+
+  _chromium_pfq_important_boards = frozenset([
+      'arm-generic_freon',
+      'arm-generic',
+      'daisy',
+      'veyron_minnie',
+      'x86-generic',
+  ])
+
+  def _AddFullConfigs():
+    """Add x86 and arm full configs."""
+    external_overrides = config_lib.BuildConfig.delete_keys(internal)
+    external_overrides.update(manifest=config_lib.BuildConfig.delete_key())
+    external_overrides.update(
+        useflags=append_useflags(['-%s' % constants.USE_CHROME_INTERNAL]))
+    _CreateConfigsForBoards(full_prebuilts, _all_full_boards,
+                            config_lib.CONFIG_TYPE_FULL,
+                            **external_overrides)
+    _CreateConfigsForBoards(chromium_info, _all_full_boards,
+                            'tot-chromium-pfq-informational', important=False,
+                            **external_overrides)
+    # Create important configs, then non-important configs.
+    _CreateConfigsForBoards(
+        internal_chromium_pfq, _chromium_pfq_important_boards,
+        'chromium-pfq', **external_overrides)
+    _CreateConfigsForBoards(internal_chromium_pfq, _all_full_boards,
+                            'chromium-pfq', important=False,
+                            **external_overrides)
+
+  _AddFullConfigs()
+
+
+  # These remaining chromium pfq configs have eccentricities that are easier to
+  # create manually.
+
+  site_config.AddConfig(
+      internal_chromium_pfq, 'amd64-generic-chromium-pfq',
+      _base_configs['amd64-generic'],
+      disk_layout='2gb-rootfs',
+  )
+
+  site_config.AddConfig(
+      internal_chromium_pfq, 'amd64-generic_freon-chromium-pfq',
+      _base_configs['amd64-generic_freon'],
+      disk_layout='2gb-rootfs',
+      vm_tests=[],
+  )
+
+  site_config.AddConfig(
+      internal_chromium_pfq, 'x86-generic_freon-chromium-pfq',
+      _base_configs['x86-generic_freon'],
+      vm_tests=[],
+  )
+
+  _chrome_pfq_important_boards = frozenset([
+      'peppy',
+      'rush_ryu',
+      'veyron_pinky',
+      'nyan',
+  ])
+
+
+  # TODO(akeshet): Replace this with a config named x86-alex-chrome-pfq.
+  site_config.AddConfig(
+      chrome_pfq, 'alex-chrome-pfq',
+      _base_configs['x86-alex'],
+  )
+
+  site_config.AddConfig(
+      chrome_pfq, 'lumpy-chrome-pfq',
+      _base_configs['lumpy'],
+      afdo_generate=True,
+      hw_tests=[HWTestList.AFDORecordTest()] + HWTestList.SharedPoolPFQ(),
+  )
+
+  site_config.AddConfig(
+      chrome_pfq, 'daisy_skate-chrome-pfq',
+      _base_configs['daisy_skate'],
+      hw_tests=HWTestList.SharedPoolPFQ(),
+  )
+
+  site_config.AddConfig(
+      chrome_pfq, 'falco-chrome-pfq',
+      _base_configs['falco'],
+      hw_tests=HWTestList.SharedPoolPFQ(),
+  )
+
+  site_config.AddConfig(
+      chrome_pfq, 'peach_pit-chrome-pfq',
+      _base_configs['peach_pit'],
+      hw_tests=HWTestList.SharedPoolPFQ(),
+  )
+
+  site_config.AddConfig(
+      chrome_pfq, 'tricky-chrome-pfq',
+      _base_configs['tricky'],
+      hw_tests=HWTestList.SharedPoolPFQ(),
+  )
+
+  _telemetry_boards = frozenset([
+      'amd64-generic',
+      'arm-generic',
+      'x86-generic',
+  ])
+
+  _CreateConfigsForBoards(telemetry, _telemetry_boards, 'telemetry')
+
+  _toolchain_major = site_config.AddConfigWithoutTemplate(
+      'toolchain-major',
+      _cros_sdk,
+      latest_toolchain=True,
+      prebuilts=False,
+      trybot_list=False,
+      gcc_githash='svn-mirror/google/main',
+      description='Test next major toolchain revision',
+  )
+
+  _toolchain_minor = site_config.AddConfigWithoutTemplate(
+      'toolchain-minor',
+      _cros_sdk,
+      latest_toolchain=True,
+      prebuilts=False,
+      trybot_list=False,
+      gcc_githash='svn-mirror/google/gcc-4_9',
+      description='Test next minor toolchain revision',
+  )
+
+  site_config.AddConfig(
+      llvm,
+      'amd64-generic-llvm',
+      incremental,
+      boards=['amd64-generic'],
+      chroot_replace=True,
+      description='Build with LLVM',
+      trybot_list=True,
+  )
+
+  site_config.AddConfig(
+      asan,
+      'x86-generic-asan',
+      incremental,
+      boards=['x86-generic'],
+      chroot_replace=True,
+      description='Build with Address Sanitizer (Clang)',
+      trybot_list=True,
+  )
+
+  tot_asan_info = site_config.AddTemplate(
+      'tot-asan-informational',
+      chromium_info,
+      asan,
+      description='Build TOT Chrome with Address Sanitizer (Clang)',
+  )
+
+  site_config.AddConfig(
+      tot_asan_info,
+      'x86-generic-tot-asan-informational',
+      boards=['x86-generic'],
+  )
+
+  site_config.AddConfig(
+      asan,
+      'amd64-generic-asan',
+      incremental,
+      boards=['amd64-generic'],
+      description='Build with Address Sanitizer (Clang)',
+      trybot_list=True,
+  )
+
+
+  site_config.AddConfig(
+      tot_asan_info, 'amd64-generic-tot-asan-informational',
+      boards=['amd64-generic'],
+  )
+
+  incremental_beaglebone = incremental.derive(beaglebone)
+  site_config.AddConfig(
+      incremental_beaglebone, 'beaglebone-incremental',
+      boards=['beaglebone'],
+      trybot_list=True,
+      description='Incremental Beaglebone Builder',
+  )
+
+  site_config.AddConfigWithoutTemplate(
+      'refresh-packages',
+      no_vmtest_builder,
+      no_hwtest_builder,
+      boards=['x86-generic', 'arm-generic'],
+      builder_class_name='misc_builders.RefreshPackagesBuilder',
+      description='Check upstream Gentoo for package updates',
+  )
+
+  site_config.AddConfig(
+      incremental, 'x86-generic-incremental',
+      _base_configs['x86-generic'],
+  )
+
+  site_config.AddConfig(
+      incremental, 'daisy-incremental',
+      _base_configs['daisy'],
+      config_lib.BuildConfig.delete_keys(internal),
+      manifest=config_lib.BuildConfig.delete_key(),
+      useflags=append_useflags(['-chrome_internal']),
+  )
+
+  site_config.AddConfig(
+      incremental, 'amd64-generic-incremental',
+      _base_configs['amd64-generic'],
+      # This builder runs on a VM, so it can't run VM tests.
+      vm_tests=[],
+  )
+
+  site_config.AddConfig(
+      incremental, 'x32-generic-incremental',
+      _base_configs['x32-generic'],
+      # This builder runs on a VM, so it can't run VM tests.
+      vm_tests=[],
+  )
+
+  site_config.AddConfig(
+      paladin, 'x86-generic-asan-paladin',
+      _base_configs['x86-generic'],
+      asan,
+      description='Paladin build with Address Sanitizer (Clang)',
+      important=False,
+  )
+
+  site_config.AddConfig(
+      paladin, 'amd64-generic-asan-paladin',
+      _base_configs['amd64-generic'],
+      asan,
+      description='Paladin build with Address Sanitizer (Clang)',
+      important=False,
+  )
+
+  _chrome_perf_boards = frozenset([
+      'daisy',
+      'lumpy',
+      'parrot',
+  ])
+
+  _CreateConfigsForBoards(chrome_perf, _chrome_perf_boards, 'chrome-perf',
+                          trybot_list=True)
+
+
+  _CreateConfigsForBoards(chromium_info,
+                          ['x86-generic', 'amd64-generic'],
+                          'telem-chromium-pfq-informational',
+                          **telemetry.derive(chrome_try))
+
+  #
+  # Internal Builds
+  #
+
+  internal_pfq = internal.derive(
+      official_chrome, pfq,
+      overlays=constants.BOTH_OVERLAYS,
+      prebuilts=constants.PRIVATE,
+  )
+
+  # Because branch directories may be shared amongst builders on multiple
+  # branches, they must delete the chroot every time they run.
+  # They also potentially need to build [new] Chrome.
+  internal_pfq_branch = site_config.AddTemplate(
+      'pre-flight-branch',
+      internal_pfq,
+      branch=True,
+      trybot_list=False,
+      sync_chrome=True,
+      active_waterfall=constants.WATERFALL_RELEASE)
+
+  internal_paladin = internal.derive(
+      official_chrome, paladin,
+      manifest=constants.OFFICIAL_MANIFEST,
+      overlays=constants.BOTH_OVERLAYS,
+      prebuilts=constants.PRIVATE,
+      vm_tests=[],
+      description=paladin['description'] + ' (internal)',
+  )
+
  # Used for paladin builders with the nowithdebug flag (a.k.a. -cros-debug).
+  internal_nowithdebug_paladin = internal_paladin.derive(
+      useflags=append_useflags(['-cros-debug']),
+      description=paladin['description'] + ' (internal, nowithdebug)',
+      prebuilts=False,
+  )
+
+  _CreateConfigsForBoards(
+      internal_nowithdebug_paladin,
+      ['x86-generic', 'amd64-generic'],
+      'nowithdebug-paladin',
+      important=False,
+  )
+
+  site_config.AddConfig(
+      internal_nowithdebug_paladin,
+      'x86-mario-nowithdebug-paladin',
+      boards=['x86-mario'])
+
+  # Used for builders which build completely from source except Chrome.
+  full_compile_paladin = paladin.derive(
+      board_replace=True,
+      chrome_binhost_only=True,
+      chrome_sdk=False,
+      cpe_export=False,
+      debug_symbols=False,
+      prebuilts=False,
+      unittests=False,
+      upload_hw_test_artifacts=False,
+      vm_tests=[],
+  )
+
+  # falco is the only board that has the -clang-clean CFLAG right now,
+  # so it's important that falco stays as a full-compile builder.
+  # TODO(yunlian): Add -clang-clean to more boards.
+  # See https://chromium-review.googlesource.com/#/c/275862/
+  _CreateConfigsForBoards(
+      full_compile_paladin,
+      ['falco', 'nyan'],
+      'full-compile-paladin',
+  )
+
+  pre_cq = site_config.AddTemplate(
+      'pre-cq',
+      paladin,
+      build_type=constants.INCREMENTAL_TYPE,
+      build_packages_in_background=True,
+      pre_cq=True,
+      archive=False,
+      chrome_sdk=False,
+      chroot_replace=True,
+      debug_symbols=False,
+      prebuilts=False,
+      cpe_export=False,
+      vm_tests=[constants.SMOKE_SUITE_TEST_TYPE],
+      vm_tests_override=None,
+      description='Verifies compilation, building an image, and vm/unit tests '
+                  'if supported.',
+      doc='http://www.chromium.org/chromium-os/build/builder-overview#'
+          'TOC-Pre-CQ',
+      health_alert_recipients=['chromeos-infra-eng@grotations.appspotmail.com'],
+      health_threshold=3,
+  )
+
+  # Pre-CQ targets that only check compilation and unit tests.
+  unittest_only_pre_cq = pre_cq.derive(
+      no_vmtest_builder,
+      description='Verifies compilation and unit tests only',
+      compilecheck=True,
+  )
+
+  # Pre-CQ targets that don't run VMTests.
+  no_vmtest_pre_cq = site_config.AddTemplate(
+      'no-vmtest-pre-cq',
+      pre_cq,
+      no_vmtest_builder,
+      description='Verifies compilation, building an image, and unit tests '
+                  'if supported.',
+  )
+
+  # Pre-CQ targets that only check compilation.
+  compile_only_pre_cq = site_config.AddTemplate(
+      'compile-only-pre-cq',
+      unittest_only_pre_cq,
+      description='Verifies compilation only',
+      unittests=False,
+  )
+
+  site_config.AddConfigWithoutTemplate(
+      constants.BRANCH_UTIL_CONFIG,
+      internal_paladin,
+      no_vmtest_builder,
+      no_hwtest_builder,
+      boards=[],
+      # Disable postsync_patch to prevent conflicting patches from being applied
+      # - e.g., patches from 'master' branch being applied to a branch.
+      postsync_patch=False,
+      # Disable postsync_reexec to continue running the 'master' branch chromite
+      # for all stages, rather than the chromite in the branch buildroot.
+      postsync_reexec=False,
+      # Need to reset the paladin build_type we inherited.
+      build_type=None,
+      builder_class_name='release_builders.CreateBranchBuilder',
+      description='Used for creating/deleting branches (TPMs only)',
+  )
+
+  # Internal incremental builders don't use official chrome because we want
+  # to test the developer workflow.
+  internal_incremental = internal.derive(
+      incremental,
+      overlays=constants.BOTH_OVERLAYS,
+      description='Incremental Builds (internal)',
+  )
+
+  site_config.AddConfig(
+      internal_pfq_branch, 'lumpy-pre-flight-branch',
+      master=True,
+      push_overlays=constants.BOTH_OVERLAYS,
+      boards=['lumpy'],
+      afdo_generate=True,
+      afdo_update_ebuild=True,
+      hw_tests=[HWTestList.AFDORecordTest()],
+  )
+
+  # A test-ap image is just a test image with a special profile enabled.
+  # Note that each board enabled for test-ap use has to have the testbed-ap
+  # profile linked to from its private overlay.
+  _test_ap = site_config.AddTemplate(
+      'test-ap',
+      internal,
+      default_hw_tests_override,
+      description='WiFi AP images used in testing',
+      profile='testbed-ap',
+      vm_tests=[],
+  )
+
+  site_config.AddGroup(
+      'test-ap-group',
+      site_config.AddConfig(_test_ap, 'stumpy-test-ap', boards=['stumpy']),
+      site_config.AddConfig(_test_ap, 'panther-test-ap', boards=['panther']),
+  )
+
+  ### Master paladin (CQ builder).
+
+  site_config.AddConfig(
+      internal_paladin, 'master-paladin',
+      boards=[],
+      master=True,
+      binhost_test=True,
+      push_overlays=constants.BOTH_OVERLAYS,
+      description='Commit Queue master (all others are slaves)',
+
      # This name should remain synced with the name used in
+      # build_internals/masters/master.chromeos/board_config.py.
+      # TODO(mtennant): Fix this.  There should be some amount of auto-
+      # configuration in the board_config.py code.
+      health_threshold=3,
+      health_alert_recipients=['chromeos-infra-eng@grotations.appspotmail.com',
+                               'tree'],
+      sanity_check_slaves=['wolf-tot-paladin'],
+      trybot_list=False,
+  )
+
+  ### Other paladins (CQ builders).
+  # These are slaves of the master paladin by virtue of matching
+  # in a few config values (e.g. 'build_type', 'branch', etc).  If
+  # they are not 'important' then they are ignored slaves.
+  # TODO(mtennant): This master-slave relationship should be specified
+  # here in the configuration, rather than GetSlavesForMaster().
+  # Something like the following:
+  # master_paladin = site_config.AddConfig(internal_paladin, ...)
+  # master_paladin.AddSlave(site_config.AddConfig(internal_paladin, ...))
+
+  # Sanity check builder, part of the CQ but builds without the patches
+  # under test.
+  site_config.AddConfig(
+      internal_paladin, 'wolf-tot-paladin',
+      boards=['wolf'],
+      do_not_apply_cq_patches=True,
+      prebuilts=False,
+      hw_tests=HWTestList.SharedPoolCQ(),
+  )
+
  # Every board gets a plain <board>-paladin config (see
  # _CreatePaladinConfigs below, which consumes all of the sets here).
  _paladin_boards = _all_boards

  # List of paladin boards where the regular paladin config is important.
  # Boards not listed here get important=False on their paladin config.
  _paladin_important_boards = frozenset([
      'amd64-generic',
      'arm-generic',
      'auron',
      'beaglebone',
      'butterfly',
      'daisy',
      'daisy_skate',
      'daisy_spring',
      'nyan_freon',
      'falco',
      'gizmo',
      'guado_moblab',
      'kayle',
      'lakitu',
      'lakitu_mobbuild',
      'leon',
      'link',
      'lumpy',
      'monroe',
      'nyan',
      'oak',
      'panther',
      'parrot',
      'peach_pit',
      'peppy',
      'rambi',
      'rush_ryu',
      'samus',
      'smaug',
      'storm',
      'stout',
      'strago',
      'stumpy',
      'tricky',
      'veyron_pinky',
      'whirlwind',
      'wolf',
      'x86-alex',
      'x86-generic',
      'x86-mario',
      'x86-zgb',
  ])

  # Boards whose paladins run the simple auto-update VM test.
  _paladin_simple_vmtest_boards = frozenset([
      'rambi',
      'x86-mario',
  ])

  # Boards whose paladins run the dev-mode VM test.
  _paladin_devmode_vmtest_boards = frozenset([
      'parrot',
  ])

  # Boards whose paladins run the cros VM test.
  _paladin_cros_vmtest_boards = frozenset([
      'stout',
  ])

  # Boards whose paladins run the smoke suite VM test.
  _paladin_smoke_vmtest_boards = frozenset([
      'amd64-generic',
      'x86-generic',
  ])

  # Boards that keep the paladin template's default vm_tests untouched
  # (every other board gets an explicit vm_tests list built from the sets
  # above).
  _paladin_default_vmtest_boards = frozenset([
      'x32-generic',
  ])

  # Boards whose paladins schedule the default CQ hardware test suites.
  _paladin_hwtest_boards = frozenset([
      'daisy_skate',
      'link',
      'lumpy',
      'peach_pit',
      'peppy',
      'stumpy',
      'wolf',
      'x86-alex',
      'x86-zgb',
  ])

  # Boards whose paladins run the moblab quick suite on hardware instead.
  _paladin_moblab_hwtest_boards = frozenset([
      'guado_moblab',
  ])

  # Boards whose paladins replace the chroot on every run.
  _paladin_chroot_replace_boards = frozenset([
      'butterfly',
      'daisy_spring',
  ])

  # Boards whose paladins upload debug symbols separately.
  _paladin_separate_symbols = frozenset([
      'amd64-generic',
      'gizmo',
  ])
+
  def _CreatePaladinConfigs():
    """Create a <board>-paladin config for every board in _paladin_boards.

    Each config is assembled from the paladin template, a customization
    overlay driven by the _paladin_* board sets declared above, and the
    board's entry in _base_configs.
    """
    for board in _paladin_boards:
      assert board in _base_configs, '%s not in _base_configs' % board
      config_name = '%s-%s' % (board, constants.PALADIN_TYPE)
      customizations = config_lib.BuildConfig()
      base_config = _base_configs[board]
      # Hardware tests: regular CQ suites, or the moblab quick suite for
      # moblab boards (the moblab branch overwrites hw_tests if a board is
      # ever in both sets).
      if board in _paladin_hwtest_boards:
        customizations.update(hw_tests=HWTestList.DefaultListCQ())
      if board in _paladin_moblab_hwtest_boards:
        customizations.update(
            hw_tests=[
                config_lib.HWTestConfig(
                    constants.HWTEST_MOBLAB_QUICK_SUITE,
                    blocking=True, num=1, timeout=120*60,
                    pool=constants.HWTEST_PALADIN_POOL)
            ])
      if board not in _paladin_important_boards:
        customizations.update(important=False)
      if board in _paladin_chroot_replace_boards:
        customizations.update(chroot_replace=True)
      # Internal boards additionally layer the internal/official-Chrome
      # settings via derive() (which returns a new config object).
      if board in _internal_boards:
        customizations = customizations.derive(
            internal, official_chrome,
            manifest=constants.OFFICIAL_MANIFEST)
      if board in _paladin_separate_symbols:
        customizations.update(separate_debug_symbols=True)

      # Boards not relying on the template's default vm_tests get an
      # explicit list assembled from the _paladin_*_vmtest_boards sets.
      if board not in _paladin_default_vmtest_boards:
        vm_tests = []
        if board in _paladin_simple_vmtest_boards:
          vm_tests.append(constants.SIMPLE_AU_TEST_TYPE)
        if board in _paladin_cros_vmtest_boards:
          vm_tests.append(constants.CROS_VM_TEST_TYPE)
        if board in _paladin_devmode_vmtest_boards:
          vm_tests.append(constants.DEV_MODE_TEST_TYPE)
        if board in _paladin_smoke_vmtest_boards:
          vm_tests.append(constants.SMOKE_SUITE_TEST_TYPE)
        customizations.update(vm_tests=vm_tests)

        if paladin.vm_tests_override is not None:
          # Make sure any new tests are also in override.
          override = paladin.vm_tests_override[:]
          for test in vm_tests:
            if test not in override:
              override.append(test)

          customizations.update(vm_tests_override=override)

      # Internal boards publish private prebuilts; everything else public.
      if base_config.get('internal'):
        customizations.update(
            prebuilts=constants.PRIVATE,
            description=paladin['description'] + ' (internal)')
      else:
        customizations.update(prebuilts=constants.PUBLIC)
      site_config.AddConfig(
          paladin, config_name,
          customizations,
          base_config)


  _CreatePaladinConfigs()
+
+
  # Compile-check-only lumpy paladin; build_before_patching presumably lets
  # it compare pre- and post-patch builds -- confirm with the stage code.
  site_config.AddConfig(
      internal_paladin, 'lumpy-incremental-paladin',
      boards=['lumpy'],
      build_before_patching=True,
      prebuilts=False,
      compilecheck=True,
      unittests=False,
  )

  ### Paladins (CQ builders) which do not run VM or Unit tests on the builder
  ### itself.
  external_brillo_paladin = paladin.derive(brillo)

  site_config.AddConfig(
      external_brillo_paladin, 'panther_embedded-minimal-paladin',
      boards=['panther_embedded'],
      profile='minimal',
      trybot_list=True,
  )

  internal_beaglebone_paladin = internal_paladin.derive(beaglebone)

  site_config.AddConfig(
      internal_beaglebone_paladin, 'beaglebone-paladin',
      boards=['beaglebone'],
      trybot_list=True,
  )

  # beaglebone_servo rides along but is not a blocking CQ slave.
  site_config.AddConfig(
      internal_beaglebone_paladin, 'beaglebone_servo-paladin',
      boards=['beaglebone_servo'],
      important=False,
  )
+
+
+  def ShardHWTestsBetweenBuilders(*args):
+    """Divide up the hardware tests between the given list of config names.
+
+    Each of the config names must have the same hardware test suites, and the
+    number of suites must be equal to the number of config names.
+
+    Args:
+      *args: A list of config names.
+    """
+    # List of config names.
+    names = args
+    # Verify sanity before sharding the HWTests.
+    for name in names:
+      assert len(site_config[name].hw_tests) == len(names), \
+        '%s should have %d tests, but found %d' % (
+            name, len(names), len(site_config[name].hw_tests))
+    for name in names[1:]:
+      for test1, test2 in zip(site_config[name].hw_tests,
+                              site_config[names[0]].hw_tests):
+        assert test1.__dict__ == test2.__dict__, \
+            '%s and %s have different hw_tests configured' % (names[0], name)
+
+    # Assign each config the Nth HWTest.
+    for i, name in enumerate(names):
+      site_config[name]['hw_tests'] = [site_config[name].hw_tests[i]]
+
+  # Shard the bvt-inline and bvt-cq hw tests between similar builders.
+  # The first builder gets bvt-inline, and the second builder gets bvt-cq.
+  # bvt-cq takes longer, so it usually makes sense to give it the faster board.
+  ShardHWTestsBetweenBuilders('x86-zgb-paladin', 'x86-alex-paladin')
+  ShardHWTestsBetweenBuilders('wolf-paladin', 'peppy-paladin')
+  ShardHWTestsBetweenBuilders('daisy_skate-paladin', 'peach_pit-paladin')
+  ShardHWTestsBetweenBuilders('lumpy-paladin', 'stumpy-paladin')
+
+  # Add a pre-cq config for every board.
+  _CreateConfigsForBoards(pre_cq, _all_boards, 'pre-cq')
+  # Override 'lakitu-pre-cq' - it's in _brillo_boards, but should run vmtests.
+  site_config.AddConfig(
+      pre_cq, 'lakitu-pre-cq',
+      _base_configs['lakitu'],
+      vm_tests=[constants.SMOKE_SUITE_TEST_TYPE],
+  )
+
+  _CreateConfigsForBoards(no_vmtest_pre_cq, _all_boards, 'no-vmtest-pre-cq')
+  _CreateConfigsForBoards(
+      compile_only_pre_cq, _all_boards, 'compile-only-pre-cq')
+
+  site_config.AddConfig(
+      pre_cq, constants.BINHOST_PRE_CQ,
+      no_vmtest_pre_cq,
+      internal,
+      boards=[],
+      binhost_test=True,
+  )
+
+  # TODO(davidjames): Add peach_pit, nyan, and beaglebone to pre-cq.
+  # TODO(davidjames): Update daisy_spring to build images again.
+  site_config.AddGroup(
+      'mixed-a-pre-cq',
+      # daisy_spring w/kernel 3.8.
+      site_config['daisy_spring-compile-only-pre-cq'],
+      # lumpy w/kernel 3.8.
+      site_config['lumpy-compile-only-pre-cq'],
+  )
+
+  site_config.AddGroup(
+      'mixed-b-pre-cq',
+      # arm64 w/kernel 3.14.
+      site_config['rush_ryu-compile-only-pre-cq'],
+      # samus w/kernel 3.14.
+      site_config['samus-compile-only-pre-cq'],
+  )
+
+  site_config.AddGroup(
+      'mixed-c-pre-cq',
+      # brillo
+      site_config['storm-compile-only-pre-cq'],
+  )
+
+  site_config.AddGroup(
+      'external-mixed-pre-cq',
+      site_config['x86-generic-no-vmtest-pre-cq'],
+      site_config['amd64-generic-no-vmtest-pre-cq'],
+  )
+
+  site_config.AddGroup(
+      'kernel-3_14-a-pre-cq',
+      site_config['x86-generic-no-vmtest-pre-cq'],
+      site_config['arm-generic-no-vmtest-pre-cq']
+  )
+
+  site_config.AddGroup(
+      'kernel-3_14-b-pre-cq',
+      site_config['storm-no-vmtest-pre-cq'],
+  )
+
+  site_config.AddGroup(
+      'kernel-3_14-c-pre-cq',
+      site_config['veyron_pinky-no-vmtest-pre-cq'],
+      site_config['rush_ryu-no-vmtest-pre-cq']
+  )
+
+  site_config.AddConfigWithoutTemplate(
+      'pre-cq-launcher',
+      internal_paladin,
+      no_vmtest_builder,
+      no_hwtest_builder,
+      boards=[],
+      build_type=constants.PRE_CQ_LAUNCHER_TYPE,
+      description='Launcher for Pre-CQ builders',
+      trybot_list=False,
+      manifest_version=False,
+      # Every Pre-CQ launch failure should send out an alert.
+      health_threshold=1,
+      health_alert_recipients=['chromeos-infra-eng@grotations.appspotmail.com',
+                               'tree'],
+      doc='http://www.chromium.org/chromium-os/build/builder-overview#'
+          'TOC-Pre-CQ',
+  )
+
+
+  site_config.AddConfig(
+      internal_incremental, 'mario-incremental',
+      boards=['x86-mario'],
+  )
+
+  site_config.AddConfig(
+      internal_incremental, 'lakitu-incremental',
+      _base_configs['lakitu'],
+      vm_tests=[constants.SMOKE_SUITE_TEST_TYPE],
+  )
+
+  site_config.AddConfigWithoutTemplate(
+      'internal-toolchain-major',
+      _toolchain_major, internal, official,
+      boards=['x86-alex', 'stumpy', 'daisy', 'lakitu'],
+      build_tests=True,
+      description=_toolchain_major['description'] + ' (internal)',
+  )
+
+  site_config.AddConfigWithoutTemplate(
+      'internal-toolchain-minor',
+      _toolchain_minor, internal, official,
+      boards=['x86-alex', 'stumpy', 'daisy', 'lakitu'],
+      build_tests=True,
+      description=_toolchain_minor['description'] + ' (internal)',
+  )
+
+  _release = site_config.AddTemplate(
+      'release',
+      full,
+      official,
+      internal,
+      default_hw_tests_override,
+      build_type=constants.CANARY_TYPE,
+      useflags=append_useflags(['-cros-debug']),
+      build_tests=True,
+      afdo_use=True,
+      manifest=constants.OFFICIAL_MANIFEST,
+      manifest_version=True,
+      images=['base', 'recovery', 'test', 'factory_install'],
+      push_image=True,
+      upload_symbols=True,
+      binhost_bucket='gs://chromeos-dev-installer',
+      binhost_key='RELEASE_BINHOST',
+      binhost_base_url='https://commondatastorage.googleapis.com/'
+                       'chromeos-dev-installer',
+      dev_installer_prebuilts=True,
+      git_sync=False,
+      vm_tests=[constants.SMOKE_SUITE_TEST_TYPE,
+                constants.DEV_MODE_TEST_TYPE,
+                constants.CROS_VM_TEST_TYPE],
+      hw_tests=HWTestList.SharedPoolCanary(),
+      paygen=True,
+      signer_tests=True,
+      trybot_list=True,
+      hwqual=True,
+      description="Release Builds (canary) (internal)",
+      chrome_sdk=True,
+      image_test=True,
+      doc='http://www.chromium.org/chromium-os/build/builder-overview#'
+          'TOC-Canaries',
+  )
+
+  _grouped_config = config_lib.BuildConfig(
+      build_packages_in_background=True,
+      chrome_sdk_build_chrome=False,
+      unittests=None,
+      vm_tests=[],
+  )
+
+  _grouped_variant_config = _grouped_config.derive(
+      chrome_sdk=False,
+  )
+
+  _grouped_variant_release = _release.derive(_grouped_variant_config)
+
+  ### Master release config.
+
+  site_config.AddConfig(
+      _release, 'master-release',
+      boards=[],
+      master=True,
+      sync_chrome=False,
+      chrome_sdk=False,
+      health_alert_recipients=['chromeos-infra-eng@grotations.appspotmail.com',
+                               'tree'],
+      afdo_use=False,
+      branch_util_test=True,
+  )
+
+  ### Release config groups.
+
+  site_config.AddGroup(
+      'x86-alex-release-group',
+      site_config.AddConfig(
+          _release, 'x86-alex-release',
+          boards=['x86-alex'],
+      ),
+      site_config.AddConfig(
+          _grouped_variant_release, 'x86-alex_he-release',
+          boards=['x86-alex_he'],
+          hw_tests=[],
+          upload_hw_test_artifacts=False,
+          paygen_skip_testing=True,
+      ),
+  )
+
+  site_config.AddGroup(
+      'x86-zgb-release-group',
+      site_config.AddConfig(
+          _release, 'x86-zgb-release',
+          boards=['x86-zgb'],
+      ),
+      site_config.AddConfig(
+          _grouped_variant_release, 'x86-zgb_he-release',
+          boards=['x86-zgb_he'],
+          hw_tests=[],
+          upload_hw_test_artifacts=False,
+          paygen_skip_testing=True,
+      ),
+  )
+
+  ### Release AFDO configs.
+
+  release_afdo = _release.derive(
+      trybot_list=False,
+      hw_tests=(
+          HWTestList.DefaultList(pool=constants.HWTEST_SUITES_POOL, num=4) +
+          HWTestList.AFDOList()
+      ),
+      push_image=False,
+      paygen=False,
+      dev_installer_prebuilts=False,
+  )
+
+  release_afdo_generate = site_config.AddTemplate(
+      config_lib.CONFIG_TYPE_RELEASE_AFDO + '-generate',
+      release_afdo,
+      afdo_generate_min=True,
+      afdo_use=False,
+      afdo_update_ebuild=True,
+
+      hw_tests=[HWTestList.AFDORecordTest()],
+      hw_tests_override=[HWTestList.AFDORecordTest(
+          num=constants.HWTEST_TRYBOT_NUM,
+          pool=constants.HWTEST_TRYBOT_POOL,
+          file_bugs=False,
+          priority=constants.HWTEST_DEFAULT_PRIORITY,
+      )],
+  )
+
+  release_afdo_use = site_config.AddTemplate(
+      config_lib.CONFIG_TYPE_RELEASE_AFDO + '-use',
+      release_afdo,
+      afdo_use=True,
+  )
+
+  # Now generate generic release-afdo configs if we haven't created anything
+  # more specific above already. release-afdo configs are builders that do AFDO
+  # profile collection and optimization in the same builder. Used by developers
+  # that want to measure performance changes caused by their changes.
+  def _AddAFDOConfigs():
+    for board in _all_release_boards:
+      base = _base_configs[board]
+
+      config_name = '%s-%s' % (board, config_lib.CONFIG_TYPE_RELEASE_AFDO)
+      if config_name in site_config:
+        continue
+
+      generate_config_name = (
+          '%s-%s-%s' % (board,
+                        config_lib.CONFIG_TYPE_RELEASE_AFDO,
+                        'generate'))
+      use_config_name = '%s-%s-%s' % (board,
+                                      config_lib.CONFIG_TYPE_RELEASE_AFDO,
+                                      'use')
+
+      # We can't use AFDO data if afdo_use is disabled for this board.
+      if not base.get('afdo_use', True):
+        continue
+
+      site_config.AddGroup(
+          config_name,
+          site_config.AddConfig(
+              release_afdo_generate, generate_config_name, base
+          ),
+          site_config.AddConfig(
+              release_afdo_use, use_config_name, base
+          ),
+      )
+
+  _AddAFDOConfigs()
+
+  ### Release configs.
+
+  _critical_for_chrome_boards = frozenset([
+      'daisy',
+      'lumpy',
+      'parrot',
+  ])
+
+  # bayleybay-release does not enable vm_tests or unittests due to the compiler
+  # flags enabled for baytrail.
+  site_config.AddConfig(
+      _release, 'bayleybay-release',
+      boards=['bayleybay'],
+      hw_tests=[],
+      vm_tests=[],
+      unittests=False,
+  )
+
+  site_config.AddConfig(
+      _release, 'beltino-release',
+      boards=['beltino'],
+      hw_tests=[],
+      vm_tests=[],
+  )
+
+  # bobcat-release does not enable vm_tests or unittests due to the compiler
+  # flags enabled for baytrail.
+  site_config.AddConfig(
+      _release, 'bobcat-release',
+      boards=['bobcat'],
+      hw_tests=[],
+      profile='minimal',
+      # This build doesn't generate signed images, so don't try to release them.
+      paygen=False,
+      signer_tests=False,
+  )
+
+  site_config.AddConfig(
+      _release, 'gizmo-release',
+      _base_configs['gizmo'],
+      important=True,
+      paygen=False,
+      signer_tests=False,
+  )
+
+  site_config.AddConfig(
+      _release, 'samus-release',
+      _base_configs['samus'],
+      important=True,
+  )
+
+  ### Arm release configs.
+
+  site_config.AddConfig(
+      _release, 'veyron_rialto-release',
+      _base_configs['veyron_rialto'],
+      # rialto does not use Chrome.
+      sync_chrome=False,
+      chrome_sdk=False,
+  )
+
+  # Now generate generic release configs if we haven't created anything more
+  # specific above already.
  def _AddReleaseConfigs():
    """Create informational, chrome-pfq, and release configs per board."""
    # We have to mark all autogenerated PFQs as not important so the master
    # does not wait for them.  http://crbug.com/386214
    # If you want an important PFQ, you'll have to declare it yourself.
    _CreateConfigsForBoards(
        chrome_info, _all_release_boards, 'tot-chrome-pfq-informational',
        important=False)
    # NOTE(review): ordering looks significant here - the important
    # chrome-pfq boards are created before the blanket important=False
    # pass; presumably _CreateConfigsForBoards skips names that already
    # exist. Confirm before reordering.
    _CreateConfigsForBoards(
        chrome_pfq, _chrome_pfq_important_boards, 'chrome-pfq')
    _CreateConfigsForBoards(
        chrome_pfq, _all_release_boards, 'chrome-pfq', important=False)
    # Boards critical for Chrome get their release configs flagged as such.
    _CreateConfigsForBoards(
        _release, _critical_for_chrome_boards, config_lib.CONFIG_TYPE_RELEASE,
        critical_for_chrome=True)
    _CreateConfigsForBoards(
        _release, _all_release_boards, config_lib.CONFIG_TYPE_RELEASE)

  _AddReleaseConfigs()
+
+  site_config.AddConfig(
+      _release, 'panther_embedded-minimal-release',
+      _base_configs['panther_embedded'],
+      profile='minimal',
+      important=True,
+      paygen=False,
+      signer_tests=False,
+  )
+
+  # beaglebone build doesn't generate signed images, so don't try to release
+  # them.
+  _beaglebone_release = _release.derive(beaglebone, paygen=False,
+                                        signer_tests=False,
+                                        images=['base', 'test'])
+
+  site_config.AddGroup(
+      'beaglebone-release-group',
+      site_config.AddConfig(
+          _beaglebone_release, 'beaglebone-release',
+          boards=['beaglebone'],
+      ),
+      site_config.AddConfig(
+          _beaglebone_release, 'beaglebone_servo-release',
+          boards=['beaglebone_servo'],
+          payload_image='base'
+      ).derive(_grouped_variant_config),
+      important=True,
+  )
+
+  site_config.AddConfig(
+      _release, 'kayle-release',
+      _base_configs['kayle'],
+      paygen=False,
+      signer_tests=False,
+  )
+
+  site_config.AddConfig(
+      _release, 'storm-release',
+      _base_configs['storm'],
+
+      # Hw Lab can't test storm, yet.
+      paygen_skip_testing=True,
+      signer_tests=False,
+  )
+
+  moblab_release = site_config.AddTemplate(
+      'moblab-release',
+      _release,
+      description='Moblab release builders',
+      images=['base', 'recovery', 'test'],
+      paygen_skip_delta_payloads=True,
+      # TODO: re-enable paygen testing when crbug.com/386473 is fixed.
+      paygen_skip_testing=True,
+      important=False,
+      afdo_use=False,
+      signer_tests=False,
+      hw_tests=[
+          config_lib.HWTestConfig(constants.HWTEST_MOBLAB_SUITE, blocking=True,
+                                  num=1, timeout=120*60),
+          config_lib.HWTestConfig(constants.HWTEST_BVT_SUITE, blocking=True,
+                                  warn_only=True, num=1),
+          config_lib.HWTestConfig(constants.HWTEST_AU_SUITE, blocking=True,
+                                  warn_only=True, num=1)],
+  )
+
+  site_config.AddConfig(
+      moblab_release, 'stumpy_moblab-release',
+      _base_configs['stumpy_moblab'],
+  )
+
+  site_config.AddConfig(
+      moblab_release, 'guado_moblab-release',
+      _base_configs['guado_moblab'],
+  )
+
+  site_config.AddConfig(
+      moblab_release, 'panther_moblab-release',
+      _base_configs['panther_moblab'],
+  )
+
+  site_config.AddConfig(
+      _release, 'rush-release',
+      _base_configs['rush'],
+      hw_tests=[],
+      # This build doesn't generate signed images, so don't try to release them.
+      paygen=False,
+      signer_tests=False,
+  )
+
+  site_config.AddConfig(
+      _release, 'rush_ryu-release',
+      _base_configs['rush_ryu'],
+      images=['base', 'test', 'factory_install'],
+      dev_installer_prebuilts=False,
+      paygen=False,
+      signer_tests=False,
+      push_image=False,
+      hw_tests=[],
+  )
+
+  site_config.AddConfig(
+      _release, 'veyron_mickey-release',
+      _base_configs['veyron_mickey'],
+      hw_tests=[],
+      vm_tests=[],
+  )
+
+  site_config.AddConfig(
+      _release, 'veyron_romy-release',
+      _base_configs['veyron_romy'],
+      hw_tests=[],
+      vm_tests=[],
+  )
+
+  site_config.AddConfig(
+      _release, 'whirlwind-release',
+      _base_configs['whirlwind'],
+      dev_installer_prebuilts=True,
+  )
+
+  site_config.AddConfig(
+      _release, 'lakitu-release',
+      _base_configs['lakitu'],
+      vm_tests=[constants.SMOKE_SUITE_TEST_TYPE],
+      important=True,
+  )
+
+  site_config.AddConfig(
+      _release, 'lakitu_mobbuild-release',
+      _base_configs['lakitu_mobbuild'],
+      vm_tests=[constants.SMOKE_SUITE_TEST_TYPE],
+      signer_tests=False,
+      important=True,
+  )
+
+  _wificell_pre_cq = site_config.AddTemplate(
+      'wificell-pre-cq',
+      pre_cq,
+      unittests=False,
+      hw_tests=HWTestList.WiFiCellPoolPreCQ(),
+      hw_tests_override=HWTestList.WiFiCellPoolPreCQ(),
+      archive=True,
+      image_test=False,
+      description='WiFi tests acting as pre-cq for WiFi related changes',
+  )
+
+  site_config.AddGroup(
+      'mixed-wificell-pre-cq',
+      site_config.AddConfig(
+          _wificell_pre_cq,
+          'winky-wificell-pre-cq',
+          _base_configs['winky']),
+      site_config.AddConfig(
+          _wificell_pre_cq,
+          'veyron_speedy-wificell-pre-cq',
+          _base_configs['veyron_speedy']),
+      site_config.AddConfig(
+          _wificell_pre_cq,
+          'veyron_jerry-wificell-pre-cq',
+          _base_configs['veyron_jerry']),
+      site_config.AddConfig(
+          _wificell_pre_cq,
+          'daisy-wificell-pre-cq',
+          _base_configs['daisy']),
+  )
+
+  ### Per-chipset release groups
+
+  def _AddGroupConfig(name, base_board, group_boards=None,
+                      group_variant_boards=None, **kwargs):
+    """Generate full & release group configs."""
+    def _boards_list(x):
+      # Make sure _boards_list is a valid list (not None or tuple)
+      return [] if x is None else list(x)
+
+    group_boards = _boards_list(group_boards)
+    group_variant_boards = _boards_list(group_variant_boards)
+
+    for group in ('release', 'full'):
+      configs = []
+
+      all_boards = [base_board] + group_boards + group_variant_boards
+      desc = '%s; Group config (boards: %s)' % (
+          site_config['%s-%s' % (base_board, group)].description,
+          ', '.join(all_boards))
+
+      for board in all_boards:
+        if board in group_boards:
+          subconfig = _grouped_config
+        elif board in group_variant_boards:
+          subconfig = _grouped_variant_config
+        else:
+          subconfig = {}
+        board_config = '%s-%s' % (board, group)
+        configs.append(site_config[board_config].derive(subconfig, **kwargs))
+
+        config_name = '%s-%s-group' % (name, group)
+        important = group == 'release' and kwargs.get('important', True)
+      site_config.AddGroup(
+          config_name, *configs, description=desc,
+          important=important
+      )
+
+  # pineview chipset boards
+  _AddGroupConfig(
+      'pineview', 'x86-mario', (
+          'x86-alex',
+          'x86-zgb',
+      ), (
+          'x86-alex_he',
+          'x86-zgb_he',
+      )
+  )
+
+  # sandybridge chipset boards
+  _AddGroupConfig(
+      'sandybridge', 'parrot', (
+          'lumpy',
+          'butterfly',
+          'stumpy',
+      )
+  )
+
+  # ivybridge chipset boards
+  _AddGroupConfig(
+      'ivybridge', 'stout', (
+          'link',
+      ), (
+          'parrot_ivb',
+      )
+  )
+
+  # slippy-based haswell boards
+  # TODO(davidjames): Combine slippy and beltino into haswell canary, once we've
+  # optimized our builders more.
+  # slippy itself is deprecated in favor of the below boards, so we don't bother
+  # building it.
+  # TODO(dnj): Re-add peppy canary once builders are allocated.
+  _AddGroupConfig(
+      'slippy', 'peppy', (
+          'falco',
+          'leon',
+          'wolf',
+      ), (
+          'falco_li',
+      )
+  )
+
+  # beltino-based haswell boards
+  # beltino itself is deprecated in favor of the below boards, so we don't
+  # bother building it.
+
+  _AddGroupConfig(
+      'beltino-a', 'panther', (
+          'mccloud',
+      )
+  )
+
+  _AddGroupConfig(
+      'beltino-b', 'monroe', (
+          'tricky',
+          'zako',
+      )
+  )
+
+  # rambi-based boards
+  _AddGroupConfig(
+      'rambi-a', 'rambi', (
+          'clapper',
+          'enguarde',
+          'expresso',
+      )
+  )
+
+  _AddGroupConfig(
+      'rambi-b', 'glimmer', (
+          'gnawty',
+          'kip',
+          'quawks',
+      )
+  )
+
+  _AddGroupConfig(
+      'rambi-c', 'squawks', (
+          'swanky',
+          'winky',
+          'candy',
+      )
+  )
+
+  _AddGroupConfig(
+      'rambi-d', 'banjo', (
+          'ninja',
+          'sumo',
+      ),
+  )
+
+  _AddGroupConfig(
+      'rambi-e', 'orco', (
+          'heli',
+          'wizpig',
+      ),
+  )
+
+  # daisy-based boards
+  _AddGroupConfig(
+      'daisy', 'daisy', (
+          'daisy_spring',
+          'daisy_skate',
+      ),
+  )
+
+  # peach-based boards
+  _AddGroupConfig(
+      'peach', 'peach_pit', (
+          'peach_pi',
+      )
+  )
+
+  # nyan-based boards
+  _AddGroupConfig(
+      'nyan', 'nyan', (
+          'nyan_big',
+          'nyan_blaze',
+          'nyan_kitty',
+      )
+  )
+
+  # auron-based boards
+  _AddGroupConfig(
+      'auron', 'auron', (
+          'auron_yuna',
+          'auron_paine',
+      )
+  )
+
+  _AddGroupConfig(
+      'auron-b', 'lulu', (
+          'gandof',
+          'buddy',
+      ),
+  )
+
+  # veyron-based boards
+  _AddGroupConfig(
+      'veyron', 'veyron_pinky', (
+          'veyron_jerry',
+          'veyron_mighty',
+          'veyron_speedy'
+      ),
+  )
+
+  _AddGroupConfig(
+      'veyron-b', 'veyron_gus', (
+          'veyron_jaq',
+          'veyron_minnie',
+          'veyron_rialto',
+      ),
+  )
+
+  _AddGroupConfig(
+      'veyron-c', 'veyron_brain', (
+          'veyron_danger',
+          'veyron_thea',
+          'veyron_shark',
+      ),
+      important=False,
+  )
+
+  _AddGroupConfig(
+      'veyron-d', 'veyron_mickey', (
+          'veyron_romy',
+      ),
+  )
+
+  # jecht-based boards
+  _AddGroupConfig(
+      'jecht', 'jecht', (
+          'guado',
+          'tidus',
+          'rikku',
+      )
+  )
+
+  # strago-based boards
+  _AddGroupConfig(
+      'strago', 'strago', (
+          'cyan',
+          'celes',
+          'ultima',
+      ),
+      important=False,
+  )
+
+  # oak-based boards
+  _AddGroupConfig(
+      'oak', 'oak', (
+      )
+  )
+
+  # glados-based boards
+  _AddGroupConfig(
+      'glados', 'glados', (
+      ),
+  )
+
+  # storm-based boards
+  _AddGroupConfig(
+      'storm', 'storm', (
+          'arkham',
+          'whirlwind',
+      ),
+      important=False,
+  )
+
+  # kunimitsu-based boards
+  _AddGroupConfig(
+      'kunimitsu', 'kunimitsu', (
+      ),
+  )
+
+  # Factory and Firmware releases must inherit from these classes.
+  # Modifications for these release builders should go here.
+
+  # Naming conventions also must be followed. Factory and firmware branches
+  # must end in -factory or -firmware suffixes.
+
+  # Factory builds skip hw tests, symbol upload, chrome SDK, paygen and AFDO.
+  _factory_release = site_config.AddTemplate(
+      'factory',
+      _release,
+      upload_hw_test_artifacts=False,
+      upload_symbols=False,
+      hw_tests=[],
+      chrome_sdk=False,
+      description='Factory Builds',
+      paygen=False,
+      afdo_use=False,
+  )
+
+  _firmware = config_lib.BuildConfig(
+      no_vmtest_builder,
+      images=[],
+      factory_toolkit=False,
+      packages=['virtual/chromeos-firmware', 'chromeos-base/autotest-all'],
+      usepkg_build_packages=True,
+      sync_chrome=False,
+      build_tests=True,
+      chrome_sdk=False,
+      unittests=False,
+      hw_tests=[],
+      dev_installer_prebuilts=False,
+      upload_hw_test_artifacts=True,
+      upload_symbols=False,
+      useflags=['chromeless_tty'],
+      signer_tests=False,
+      trybot_list=False,
+      paygen=False,
+      image_test=False,
+  )
+
+  _firmware_release = site_config.AddTemplate(
+      'firmware',
+      _release,
+      _firmware,
+      description='Firmware Canary',
+      manifest=constants.DEFAULT_MANIFEST,
+      afdo_use=False,
+  )
+
+  _depthcharge_release = site_config.AddTemplate(
+      'depthcharge-firmware',
+      _firmware_release,
+      useflags=append_useflags(['depthcharge']))
+
+  _depthcharge_full_internal = site_config.AddTemplate(
+      'depthcharge-full-firmware',
+      full,
+      internal,
+      _firmware,
+      useflags=append_useflags(['depthcharge']),
+      description='Firmware Informational',
+  )
+
+  _firmware_boards = frozenset([
+      'auron',
+      'banjo',
+      'bayleybay',
+      'beltino',
+      'butterfly',
+      'candy',
+      'clapper',
+      'cyan',
+      'daisy',
+      'daisy_skate',
+      'daisy_spring',
+      'enguarde',
+      'expresso',
+      'falco',
+      'glimmer',
+      'gnawty',
+      'jecht',
+      'kip',
+      'leon',
+      'link',
+      'lumpy',
+      'monroe',
+      'ninja',
+      'orco',
+      'panther',
+      'parrot',
+      'parry',
+      'peach_pi',
+      'peach_pit',
+      'peppy',
+      'quawks',
+      'rambi',
+      'rikku',
+      'samus',
+      'slippy',
+      'smaug',
+      'squawks',
+      'storm',
+      'stout',
+      'strago',
+      'stumpy',
+      'sumo',
+      'swanky',
+      'winky',
+      'wolf',
+      'x86-mario',
+      'zako',
+  ])
+
+  _x86_depthcharge_firmware_boards = frozenset([
+      'auron',
+      'banjo',
+      'bayleybay',
+      'candy',
+      'clapper',
+      'cyan',
+      'enguarde',
+      'expresso',
+      'glados',
+      'glimmer',
+      'gnawty',
+      'heli',
+      'jecht',
+      'kip',
+      'kunimitsu',
+      'leon',
+      'link',
+      'ninja',
+      'orco',
+      'parry',
+      'quawks',
+      'rambi',
+      'rikku',
+      'samus',
+      'squawks',
+      'strago',
+      'sumo',
+      'swanky',
+      'winky',
+      'zako',
+  ])
+
+
+  def _AddFirmwareConfigs():
+    """Add x86 and arm firmware configs.
+
+    Creates a '<board>-firmware' config for every board in _firmware_boards.
+    For every board in _x86_depthcharge_firmware_boards, additionally
+    creates a depthcharge firmware config and a depthcharge full-internal
+    firmware config.
+    """
+    for board in _firmware_boards:
+      site_config.AddConfig(
+          _firmware_release,
+          '%s-%s' % (board, config_lib.CONFIG_TYPE_FIRMWARE),
+          _base_configs[board],
+          no_vmtest_builder,
+      )
+
+    for board in _x86_depthcharge_firmware_boards:
+      site_config.AddConfig(
+          _depthcharge_release,
+          '%s-%s-%s' % (board, 'depthcharge', config_lib.CONFIG_TYPE_FIRMWARE),
+          _base_configs[board],
+          no_vmtest_builder,
+      )
+      site_config.AddConfig(
+          _depthcharge_full_internal,
+          '%s-%s-%s-%s' % (board, 'depthcharge', config_lib.CONFIG_TYPE_FULL,
+                           config_lib.CONFIG_TYPE_FIRMWARE),
+          _base_configs[board],
+          no_vmtest_builder,
+      )
+
+  _AddFirmwareConfigs()
+
+
+  # This is an example factory branch configuration.
+  # Modify it to match your factory branch.
+  site_config.AddConfig(
+      _factory_release, 'x86-mario-factory',
+      boards=['x86-mario'],
+  )
+
+  _payloads = site_config.AddTemplate(
+      'payloads',
+      internal,
+      no_vmtest_builder,
+      no_unittest_builder,
+      no_hwtest_builder,
+      build_type=constants.PAYLOADS_TYPE,
+      builder_class_name='release_builders.GeneratePayloadsBuilder',
+      description='Regenerate release payloads.',
+
+      # Sync to the code used to do the build the first time.
+      manifest_version=True,
+
+      # This is the actual work we want to do.
+      paygen=True,
+
+      upload_hw_test_artifacts=False,
+  )
+
+  def _AddPayloadConfigs():
+    """Create <board>-payloads configs for all payload generating boards.
+
+    We create a config named 'board-payloads' for every board which has a
+    config with 'paygen' True. The idea is that we have a build that generates
+    payloads, we need to have a tryjob to re-attempt them on failure.
+    """
+    payload_boards = set()
+
+    def _search_config_and_children(search_config):
+      # If paygen is enabled, add its boards to our list of payload boards.
+      if search_config['paygen']:
+        for board in search_config['boards']:
+          payload_boards.add(board)
+
+      # Recurse on any child configs.
+      for child in search_config['child_configs']:
+        _search_config_and_children(child)
+
+    # Search all configs for boards that generate payloads.
+    for _, search_config in site_config.iteritems():
+      _search_config_and_children(search_config)
+
+    # Generate a payloads trybot config for every board that generates payloads.
+    for board in payload_boards:
+      name = '%s-payloads' % board
+      site_config.AddConfig(_payloads, name, boards=[board])
+
+  _AddPayloadConfigs()
+
+  # Add special builders to help with cbuildbot development/testing.
+  site_config.Add(
+      'sync-test-cbuildbot',
+      no_hwtest_builder,
+      boards=[],
+      builder_class_name='test_builders.ManifestVersionedSyncBuilder',
+      chroot_replace=True,
+  )
+
+  def _SetupWaterfalls():
+    for name, c in site_config.iteritems():
+      if not c.get('active_waterfall'):
+        c['active_waterfall'] = GetDefaultWaterfall(c)
+
+    # Apply manual configs.
+    for waterfall, names in _waterfall_config_map.iteritems():
+      for name in names:
+        site_config[name]['active_waterfall'] = waterfall
+
+  _SetupWaterfalls()
+
+
+  def _InsertHwTestsOverrideDefaults(build):
+    """Insert default hw_tests values for a given build.
+
+    Also updates child builds.
+
+    Args:
+      build: BuildConfig instance to modify in place.
+    """
+    # Recurse into children first so every config in the tree is covered.
+    for child in build['child_configs']:
+      _InsertHwTestsOverrideDefaults(child)
+
+    if build['hw_tests_override'] is not None:
+      # Explicitly set, no need to insert defaults.
+      return
+
+    if not build['hw_tests']:
+      build['hw_tests_override'] = HWTestList.DefaultList(
+          num=constants.HWTEST_TRYBOT_NUM, pool=constants.HWTEST_TRYBOT_POOL,
+          file_bugs=False)
+    else:
+      # Copy over base tests.
+      build['hw_tests_override'] = [copy.copy(x) for x in build['hw_tests']]
+
+      # Adjust for manual test environment.
+      for hw_config in build['hw_tests_override']:
+        hw_config.num = constants.HWTEST_TRYBOT_NUM
+        hw_config.pool = constants.HWTEST_TRYBOT_POOL
+        hw_config.file_bugs = False
+        hw_config.priority = constants.HWTEST_DEFAULT_PRIORITY
+
+    # TODO: Fix full_release_test.py/AUTest on trybots, crbug.com/390828.
+    # Drop the AU suite from the override list regardless of source.
+    build['hw_tests_override'] = [
+        hw_config for hw_config in build['hw_tests_override']
+        if hw_config.suite != constants.HWTEST_AU_SUITE]
+
+  for build in site_config.itervalues():
+    _InsertHwTestsOverrideDefaults(build)
+
+  return site_config
diff --git a/cbuildbot/chromeos_config_unittest b/cbuildbot/chromeos_config_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/cbuildbot/chromeos_config_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/cbuildbot/chromeos_config_unittest.py b/cbuildbot/chromeos_config_unittest.py
new file mode 100644
index 0000000..82f4c78
--- /dev/null
+++ b/cbuildbot/chromeos_config_unittest.py
@@ -0,0 +1,824 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for config."""
+
+from __future__ import print_function
+
+import mock
+import re
+import cPickle
+
+from chromite.cbuildbot import builders
+from chromite.cbuildbot import chromeos_config
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import config_lib_unittest
+from chromite.cbuildbot import constants
+from chromite.cbuildbot.builders import generic_builders
+from chromite.lib import cros_test_lib
+from chromite.lib import git
+from chromite.lib import osutils
+
+# pylint: disable=protected-access
+
+CHROMIUM_WATCHING_URL = (
+    'http://src.chromium.org/chrome/trunk/tools/build/masters/'
+    'master.chromium.chromiumos/master_chromiumos_cros_cfg.py'
+)
+
+
+class GenerateChromeosConfigTestBase(cros_test_lib.TestCase):
+  """Base class for tests of chromeos_config."""
+
+  def setUp(self):
+    # Regenerate the full config for every test; subclasses read it via
+    # self.all_configs.
+    self.all_configs = chromeos_config.GetConfig()
+
+
+class ConfigDumpTest(GenerateChromeosConfigTestBase):
+  """Tests related to config_dump.json & chromeos_config.py"""
+
+  def testDump(self):
+    """Make sure the json & config are kept in sync"""
+    new_dump = self.all_configs.SaveConfigToString()
+    old_dump = osutils.ReadFile(constants.CHROMEOS_CONFIG_FILE).rstrip()
+
+    self.assertTrue(
+        new_dump == old_dump, 'config_dump.json does not match the '
+        'configs defined in chromeos_config.py. Run '
+        'bin/cbuildbot_view_config --update_config')
+
+  def testSaveLoadReload(self):
+    """Make sure that loading and reloading the config is a no-op."""
+    dump = self.all_configs.SaveConfigToString()
+    loaded = config_lib.LoadConfigFromString(dump)
+    self.assertEqual(self.all_configs, loaded)
+
+
+class ConfigPickleTest(GenerateChromeosConfigTestBase):
+  """Test that a config object is pickleable."""
+
+  def testPickle(self):
+    bc1 = self.all_configs['x86-mario-paladin']
+    bc2 = cPickle.loads(cPickle.dumps(bc1))
+
+    self.assertEquals(bc1.boards, bc2.boards)
+    self.assertEquals(bc1.name, bc2.name)
+
+
+class ConfigClassTest(GenerateChromeosConfigTestBase):
+  """Tests of the config class itself."""
+
+  def testAppendUseflags(self):
+    base_config = config_lib.BuildConfig()
+    inherited_config_1 = base_config.derive(
+        useflags=chromeos_config.append_useflags(
+            ['foo', 'bar', '-baz']))
+    inherited_config_2 = inherited_config_1.derive(
+        useflags=chromeos_config.append_useflags(['-bar', 'baz']))
+    self.assertEqual(inherited_config_1.useflags, ['-baz', 'bar', 'foo'])
+    self.assertEqual(inherited_config_2.useflags, ['-bar', 'baz', 'foo'])
+
+
+class CBuildBotTest(GenerateChromeosConfigTestBase):
+  """General tests of chromeos_config."""
+
+  def testConfigsKeysMismatch(self):
+    """Verify that all configs contain exactly the default keys.
+
+    This checks for misspelled keys, or keys that are somehow removed.
+    """
+    expected_keys = set(self.all_configs.GetDefault().iterkeys())
+    for build_name, config in self.all_configs.iteritems():
+      config_keys = set(config.keys())
+
+      extra_keys = config_keys.difference(expected_keys)
+      self.assertFalse(extra_keys, ('Config %s has extra values %s' %
+                                    (build_name, list(extra_keys))))
+
+      missing_keys = expected_keys.difference(config_keys)
+      self.assertFalse(missing_keys, ('Config %s is missing values %s' %
+                                      (build_name, list(missing_keys))))
+
+  def testConfigsHaveName(self):
+    """Configs must have names set."""
+    for build_name, config in self.all_configs.iteritems():
+      self.assertTrue(build_name == config['name'])
+
+  def testConfigUseflags(self):
+    """Useflags must be lists.
+
+    Strings are interpreted as arrays of characters for this, which is not
+    useful.
+    """
+    for build_name, config in self.all_configs.iteritems():
+      useflags = config.get('useflags')
+      if not useflags is None:
+        self.assertTrue(
+            isinstance(useflags, list),
+            'Config %s: useflags should be a list.' % build_name)
+
+  def testBoards(self):
+    """Verify 'boards' is explicitly set for every config."""
+    for build_name, config in self.all_configs.iteritems():
+      self.assertTrue(isinstance(config['boards'], (tuple, list)),
+                      "Config %s doesn't have a list of boards." % build_name)
+      self.assertEqual(len(set(config['boards'])), len(config['boards']),
+                       'Config %s has duplicate boards.' % build_name)
+      if config['builder_class_name'] in (
+          'sdk_builders.ChrootSdkBuilder',
+          'misc_builders.RefreshPackagesBuilder'):
+        self.assertTrue(len(config['boards']) >= 1,
+                        'Config %s requires 1 or more boards.' % build_name)
+      else:
+        # TODO: Switch to assert after wificell-pre-cq is fixed.
+        if not len(config['boards']) <= 1:
+          print('Config %s should have <= 1 board.' % build_name)
+        # self.assertTrue(len(config['boards']) <= 1,
+        #                 'Config %s should have <= 1 board.' % build_name)
+
+  def testOverlaySettings(self):
+    """Verify overlays and push_overlays have legal values."""
+    for build_name, config in self.all_configs.iteritems():
+      overlays = config['overlays']
+      push_overlays = config['push_overlays']
+
+      self.assertTrue(overlays in [None, 'public', 'private', 'both'],
+                      'Config %s: has unexpected overlays value.' % build_name)
+      self.assertTrue(
+          push_overlays in [None, 'public', 'private', 'both'],
+          'Config %s: has unexpected push_overlays value.' % build_name)
+
+      if overlays == None:
+        subset = [None]
+      elif overlays == 'public':
+        subset = [None, 'public']
+      elif overlays == 'private':
+        subset = [None, 'private']
+      elif overlays == 'both':
+        subset = [None, 'public', 'private', 'both']
+
+      self.assertTrue(
+          push_overlays in subset,
+          ('Config %s: push_overlays should be a subset of overlays.' %
+           build_name))
+
+  def testOverlayMaster(self):
+    """Verify that only one master is pushing uprevs for each overlay."""
+    masters = {}
+    for build_name, config in self.all_configs.iteritems():
+      overlays = config['overlays']
+      push_overlays = config['push_overlays']
+      if (overlays and push_overlays and config['uprev'] and config['master']
+          and not config['branch']):
+        other_master = masters.get(push_overlays)
+        err_msg = 'Found two masters for push_overlays=%s: %s and %s'
+        self.assertFalse(
+            other_master, err_msg % (push_overlays, build_name, other_master))
+        masters[push_overlays] = build_name
+
+    if 'both' in masters:
+      self.assertEquals(len(masters), 1, 'Found too many masters.')
+
+  def testChromeRev(self):
+    """Verify chrome_rev has an expected value"""
+    for build_name, config in self.all_configs.iteritems():
+      self.assertTrue(
+          config['chrome_rev'] in constants.VALID_CHROME_REVISIONS + [None],
+          'Config %s: has unexpected chrome_rev value.' % build_name)
+      self.assertFalse(
+          config['chrome_rev'] == constants.CHROME_REV_LOCAL,
+          'Config %s: has unexpected chrome_rev_local value.' % build_name)
+      if config['chrome_rev']:
+        self.assertTrue(
+            config_lib.IsPFQType(config['build_type']),
+            'Config %s: has chrome_rev but is not a PFQ.' % build_name)
+
+  def testValidVMTestType(self):
+    """Verify vm_tests has an expected value"""
+    for build_name, config in self.all_configs.iteritems():
+      if config['vm_tests'] is None:
+        continue
+      for test_type in config['vm_tests']:
+        self.assertTrue(
+            test_type in constants.VALID_VM_TEST_TYPES,
+            'Config %s: has unexpected vm test type value.' % build_name)
+
+  def testImageTestMustHaveBaseImage(self):
+    """Verify image_test build is only enabled with 'base' in images."""
+    for build_name, config in self.all_configs.iteritems():
+      if config.get('image_test', False):
+        self.assertTrue(
+            'base' in config['images'],
+            'Build %s runs image_test but does not have base image' %
+            build_name)
+
+  def testBuildType(self):
+    """Verifies that all configs use valid build types."""
+    for build_name, config in self.all_configs.iteritems():
+      # For builders that have explicit classes, this check doesn't make sense.
+      if config['builder_class_name']:
+        continue
+      self.assertIn(config['build_type'], constants.VALID_BUILD_TYPES,
+                    'Config %s: has unexpected build_type value.' % build_name)
+
+  def testGCCGitHash(self):
+    """Verifies that gcc_githash is not set without setting latest_toolchain."""
+    for build_name, config in self.all_configs.iteritems():
+      if config['gcc_githash']:
+        self.assertTrue(
+            config['latest_toolchain'],
+            'Config %s: has gcc_githash but not latest_toolchain.' % build_name)
+
+  def testBuildToRun(self):
+    """Verify we don't try to run tests without building them."""
+    for build_name, config in self.all_configs.iteritems():
+      self.assertFalse(
+          isinstance(config['useflags'], list) and
+          '-build_tests' in config['useflags'] and config['vm_tests'],
+          'Config %s: has vm_tests and use -build_tests.' % build_name)
+
+  def testSyncToChromeSdk(self):
+    """Verify none of the configs build chrome sdk but don't sync chrome."""
+    for build_name, config in self.all_configs.iteritems():
+      if config['sync_chrome'] is not None and not config['sync_chrome']:
+        self.assertFalse(
+            config['chrome_sdk'],
+            'Config %s: has chrome_sdk but not sync_chrome.' % build_name)
+
+  def testOverrideVmTestsOnly(self):
+    """VM/unit tests listed should also be supported."""
+    for build_name, config in self.all_configs.iteritems():
+      if config.vm_tests_override is not None:
+        for test in config.vm_tests:
+          self.assertIn(
+              test, config.vm_tests_override,
+              'Config %s: has %s VM test, not in override (%s, %s).' % \
+              (build_name, test, config.vm_tests, config.vm_tests_override))
+
+  def testHWTestsIFFArchivingHWTestArtifacts(self):
+    """Make sure all configs upload artifacts that need them for hw testing."""
+    for build_name, config in self.all_configs.iteritems():
+      if config['hw_tests']:
+        self.assertTrue(
+            config['upload_hw_test_artifacts'],
+            "%s is trying to run hw tests without uploading payloads." %
+            build_name)
+
+  def testValidUnifiedMasterConfig(self):
+    """Make sure any unified master configurations are valid."""
+    for build_name, config in self.all_configs.iteritems():
+      error = 'Unified config for %s has invalid values' % build_name
+      # Unified masters must be internal and must rev both overlays.
+      if config['master']:
+        self.assertTrue(
+            config['internal'] and config['manifest_version'], error)
+      elif not config['master'] and config['manifest_version']:
+        # Unified slaves can rev either public or both depending on whether
+        # they are internal or not.
+        if not config['internal']:
+          self.assertEqual(config['overlays'], constants.PUBLIC_OVERLAYS, error)
+        elif config_lib.IsCQType(config['build_type']):
+          self.assertEqual(config['overlays'], constants.BOTH_OVERLAYS, error)
+
+  def testGetSlaves(self):
+    """Make sure every master has a sane list of slaves"""
+    for build_name, config in self.all_configs.iteritems():
+      if config.master:
+        configs = self.all_configs.GetSlavesForMaster(config)
+        self.assertEqual(
+            len(map(repr, configs)), len(set(map(repr, configs))),
+            'Duplicate board in slaves of %s will cause upload prebuilts'
+            ' failures' % build_name)
+
+        # Our logic for calculating what slaves have completed their critical
+        # stages will break if the master is considered a slave of itself,
+        # because db.GetSlaveStages(...) doesn't include master stages.
+        if config.build_type == constants.PALADIN_TYPE:
+          self.assertEquals(
+              config.boards, [],
+              'Master paladin %s cannot have boards.' % build_name)
+          self.assertNotIn(
+              build_name, [x.name for x in configs],
+              'Master paladin %s cannot be a slave of itself.' % build_name)
+
+  def testGetSlavesOnTrybot(self):
+    """Make sure every master has a sane list of slaves"""
+    mock_options = mock.Mock()
+    mock_options.remote_trybot = True
+    for _, config in self.all_configs.iteritems():
+      if config['master']:
+        configs = self.all_configs.GetSlavesForMaster(config, mock_options)
+        self.assertEqual([], configs)
+
+  def testFactoryFirmwareValidity(self):
+    """Ensures that firmware/factory branches have at least 1 valid name."""
+    tracking_branch = git.GetChromiteTrackingBranch()
+    for branch in ['firmware', 'factory']:
+      if tracking_branch.startswith(branch):
+        saw_config_for_branch = False
+        for build_name in self.all_configs:
+          if build_name.endswith('-%s' % branch):
+            self.assertFalse('release' in build_name,
+                             'Factory|Firmware release builders should not '
+                             'contain release in their name.')
+            saw_config_for_branch = True
+
+        self.assertTrue(
+            saw_config_for_branch, 'No config found for %s branch. '
+            'As this is the %s branch, all release configs that are being used '
+            'must end in %s.' % (branch, tracking_branch, branch))
+
+  def testBuildTests(self):
+    """Verify that we don't try to use tests without building them."""
+
+    for build_name, config in self.all_configs.iteritems():
+      if not config['build_tests']:
+        for flag in ('factory_toolkit', 'vm_tests', 'hw_tests'):
+          self.assertFalse(
+              config[flag],
+              'Config %s set %s without build_tests.' % (build_name, flag))
+
+  def testAFDOInBackground(self):
+    """Verify that we don't try to build or use AFDO data in the background."""
+    for build_name, config in self.all_configs.iteritems():
+      if config.build_packages_in_background:
+        # It is unsupported to use the build_packages_in_background flags with
+        # the afdo_generate or afdo_generate_min config options.
+        msg = 'Config %s uses build_packages_in_background with afdo_%s'
+        self.assertFalse(config.afdo_generate, msg % (build_name, 'generate'))
+        self.assertFalse(config.afdo_generate_min, msg % (build_name,
+                                                          'generate_min'))
+
+  def testReleaseGroupInBackground(self):
+    """Verify build_packages_in_background settings for release groups.
+
+    For each release group, the first builder should be set to run in the
+    foreground (to build binary packages), and the remainder of the builders
+    should be set to run in parallel (to install the binary packages.)
+    """
+    for build_name, config in self.all_configs.iteritems():
+      if build_name.endswith('-release-group'):
+        msg = 'Config %s should not build_packages_in_background'
+        self.assertFalse(config.build_packages_in_background, msg % build_name)
+
+        self.assertTrue(
+            config.child_configs,
+            'Config %s should have child configs' % build_name)
+        first_config = config.child_configs[0]
+        msg = 'Primary config for %s should not build_packages_in_background'
+        self.assertFalse(first_config.build_packages_in_background,
+                         msg % build_name)
+
+        msg = 'Child config %s for %s should build_packages_in_background'
+        for child_config in config.child_configs[1:]:
+          self.assertTrue(child_config.build_packages_in_background,
+                          msg % (child_config.name, build_name))
+
+  def testAFDOSameInChildConfigs(self):
+    """Verify that 'afdo_use' is the same for all children in a group."""
+    msg = ('Child config %s for %s should have same value for afdo_use '
+           'as other children')
+    for build_name, config in self.all_configs.iteritems():
+      if build_name.endswith('-group'):
+        prev_value = None
+        self.assertTrue(config.child_configs,
+                        'Config %s should have child configs' % build_name)
+        for child_config in config.child_configs:
+          if prev_value is None:
+            prev_value = child_config.afdo_use
+          else:
+            self.assertEqual(child_config.afdo_use, prev_value,
+                             msg % (child_config.name, build_name))
+
+  def testReleaseAFDOConfigs(self):
+    """Verify that <board>-release-afdo config have generate and use children.
+
+    These configs should have a 'generate' and a 'use' child config. Also,
+    any 'generate' and 'use' configs should be children of a release-afdo
+    config.
+    """
+    msg = 'Config %s should have %s as a parent'
+    parent_suffix = config_lib.CONFIG_TYPE_RELEASE_AFDO
+    generate_suffix = '%s-generate' % parent_suffix
+    use_suffix = '%s-use' % parent_suffix
+    for build_name, config in self.all_configs.iteritems():
+      if build_name.endswith(parent_suffix):
+        self.assertEqual(
+            len(config.child_configs), 2,
+            'Config %s should have 2 child configs' % build_name)
+        for child_config in config.child_configs:
+          child_name = child_config.name
+          self.assertTrue(child_name.endswith(generate_suffix) or
+                          child_name.endswith(use_suffix),
+                          'Config %s has wrong %s child' %
+                          (build_name, child_config))
+      if build_name.endswith(generate_suffix):
+        parent_config_name = build_name.replace(generate_suffix,
+                                                parent_suffix)
+        self.assertTrue(parent_config_name in self.all_configs,
+                        msg % (build_name, parent_config_name))
+      if build_name.endswith(use_suffix):
+        parent_config_name = build_name.replace(use_suffix,
+                                                parent_suffix)
+        self.assertTrue(parent_config_name in self.all_configs,
+                        msg % (build_name, parent_config_name))
+
+  def testNoGrandChildConfigs(self):
+    """Verify that no child configs have a child config."""
+    for build_name, config in self.all_configs.iteritems():
+      for child_config in config.child_configs:
+        for grandchild_config in child_config.child_configs:
+          self.fail('Config %s has grandchild %s' % (build_name,
+                                                     grandchild_config.name))
+
+  def testUseChromeLKGMImpliesInternal(self):
+    """Currently use_chrome_lkgm refers only to internal manifests."""
+    for build_name, config in self.all_configs.iteritems():
+      if config['use_chrome_lkgm']:
+        self.assertTrue(
+            config['internal'],
+            'Chrome lkgm currently only works with an internal manifest: %s' % (
+                build_name,))
+
+  def _HasValidSuffix(self, config_name, config_types):
+    """Given a config_name, see if it has a suffix in config_types.
+
+    Args:
+      config_name: Name of config to compare.
+      config_types: A tuple/list of config suffixes.
+
+    Returns:
+      True, if the config has a suffix matching one of the types.
+    """
+    for config_type in config_types:
+      if config_name.endswith('-' + config_type) or config_name == config_type:
+        return True
+
+    return False
+
+  def testNonOverlappingConfigTypes(self):
+    """Test that a config can only match one build suffix."""
+    # This test belongs in config_lib_unittest, except nobody else cares.
+    for config_type in config_lib.CONFIG_TYPE_DUMP_ORDER:
+      trimmed_configs = list(config_lib.CONFIG_TYPE_DUMP_ORDER)
+      trimmed_configs.remove(config_type)
+      self.assertFalse(self._HasValidSuffix(config_type, trimmed_configs))
+
+  def testConfigTypesComplete(self):
+    """Verify CONFIG_TYPE_DUMP_ORDER contains all valid config types."""
+    for config_name in self.all_configs:
+      self.assertTrue(
+          self._HasValidSuffix(config_name, config_lib.CONFIG_TYPE_DUMP_ORDER),
+          '%s did not match any types in %s' %
+          (config_name, 'config_lib.CONFIG_TYPE_DUMP_ORDER'))
+
+  def testCantBeBothTypesOfLKGM(self):
+    """Using lkgm and chrome_lkgm doesn't make sense."""
+    for config in self.all_configs.values():
+      self.assertFalse(config['use_lkgm'] and config['use_chrome_lkgm'])
+
+  def testNoDuplicateSlavePrebuilts(self):
+    """Test that no two same-board paladin slaves upload prebuilts."""
+    for cfg in self.all_configs.values():
+      if cfg['build_type'] == constants.PALADIN_TYPE and cfg['master']:
+        slaves = self.all_configs.GetSlavesForMaster(cfg)
+        prebuilt_slaves = [s for s in slaves if s['prebuilts']]
+        # Dictionary from board name to the builder name that uploads prebuilts.
+        prebuilt_slave_boards = {}
+        for slave in prebuilt_slaves:
+          for board in slave['boards']:
+            self.assertFalse(prebuilt_slave_boards.has_key(board),
+                             'Configs %s and %s both upload prebuilts for '
+                             'board %s.' % (prebuilt_slave_boards.get(board),
+                                            slave['name'],
+                                            board))
+            prebuilt_slave_boards[board] = slave['name']
+
+  def testNoDuplicateWaterfallNames(self):
+    """Tests that no two configs specify the same waterfall name."""
+    waterfall_names = set()
+    for config in self.all_configs.values():
+      wn = config['buildbot_waterfall_name']
+      if wn is not None:
+        self.assertNotIn(wn, waterfall_names,
+                         'Duplicate waterfall name %s.' % wn)
+        waterfall_names.add(wn)
+
+  def testCantBeBothTypesOfAFDO(self):
+    """Using afdo_generate and afdo_use together doesn't work."""
+    for config in self.all_configs.values():
+      self.assertFalse(config['afdo_use'] and config['afdo_generate'])
+      self.assertFalse(config['afdo_use'] and config['afdo_generate_min'])
+      self.assertFalse(config['afdo_generate'] and config['afdo_generate_min'])
+
+  def testValidPrebuilts(self):
+    """Verify all builders have valid prebuilt values."""
+    for build_name, config in self.all_configs.iteritems():
+      msg = 'Config %s: has unexpected prebuilts value.' % build_name
+      valid_values = (False, constants.PRIVATE, constants.PUBLIC)
+      self.assertTrue(config['prebuilts'] in valid_values, msg)
+
+  def testInternalPrebuilts(self):
+    for build_name, config in self.all_configs.iteritems():
+      if (config['internal'] and
+          config['build_type'] != constants.CHROME_PFQ_TYPE):
+        msg = 'Config %s is internal but has public prebuilts.' % build_name
+        self.assertNotEqual(config['prebuilts'], constants.PUBLIC, msg)
+
+  def testValidHWTestPriority(self):
+    """Verify that hw test priority is valid."""
+    for build_name, config in self.all_configs.iteritems():
+      for test_config in config['hw_tests']:
+        self.assertTrue(
+            test_config.priority in constants.HWTEST_VALID_PRIORITIES,
+            '%s has an invalid hwtest priority.' % build_name)
+
+  def testAllBoardsExist(self):
+    """Verifies that all config boards are in _all_boards."""
+    for build_name, config in self.all_configs.iteritems():
+      for board in config['boards']:
+        self.assertIn(board, chromeos_config._all_boards,
+                      'Config %s has unknown board %s.' %
+                      (build_name, board))
+
+  def testPushImagePaygenDependancies(self):
+    """Paygen requires PushImage."""
+    for build_name, config in self.all_configs.iteritems():
+
+      # paygen can't complete without push_image, except for payloads
+      # where --channel arguments meet the requirements.
+      if config['paygen']:
+        self.assertTrue(config['push_image'] or
+                        config['build_type'] == constants.PAYLOADS_TYPE,
+                        '%s has paygen without push_image' % build_name)
+
+  def testPaygenTestDependancies(self):
+    """paygen testing requires upload_hw_test_artifacts."""
+    for build_name, config in self.all_configs.iteritems():
+
+      # This requirement doesn't apply to payloads builds. Payloads are
+      # using artifacts from a previous build.
+      if build_name.endswith('-payloads'):
+        continue
+
+      if config['paygen'] and not config['paygen_skip_testing']:
+        self.assertTrue(config['upload_hw_test_artifacts'],
+                        '%s is not upload_hw_test_artifacts, but also not'
+                        ' paygen_skip_testing' % build_name)
+
+  def testPayloadImageIsBuilt(self):
+    for build_name, config in self.all_configs.iteritems():
+      if config.payload_image is not None:
+        self.assertNotEqual('recovery', config.payload_image,
+                            '%s wants to generate payloads from recovery '
+                            'images, which is not allowed.' % build_name)
+        self.assertIn(config.payload_image, config.images,
+                      '%s builds payloads from %s, which is not in images '
+                      'list %s' % (build_name, config.payload_image,
+                                   config.images))
+
+  def testBuildPackagesForRecoveryImage(self):
+    """Tests that we build the packages required for recovery image."""
+    for build_name, config in self.all_configs.iteritems():
+      if 'recovery' in config.images:
+        if not config.packages:
+          # No packages are specified. Defaults to build all packages.
+          continue
+
+        self.assertIn('chromeos-base/chromeos-initramfs',
+                      config.packages,
+                      '%s does not build chromeos-initramfs, which is required '
+                      'for creating the recovery image' % build_name)
+
+  def testBuildRecoveryImageFlags(self):
+    """Ensure the right flags are disabled when building the recovery image."""
+    incompatible_flags = ['paygen', 'signer_tests']
+    for build_name, config in self.all_configs.iteritems():
+      for flag in incompatible_flags:
+        if config[flag] and config.build_type != constants.PAYLOADS_TYPE:
+          self.assertIn('recovery', config.images,
+                        '%s does not build the recovery image, which is '
+                        'incompatible with %s=True' % (build_name, flag))
+
+  def testBuildBaseImageForRecoveryImage(self):
+    """Tests that we build the base image required for the recovery image."""
+    for build_name, config in self.all_configs.iteritems():
+      if 'recovery' in config.images:
+        self.assertIn('base', config.images,
+                      '%s does not build the base image, which is required for '
+                      'building the recovery image' % build_name)
+
+  def testChildConfigsNotImportantInReleaseGroup(self):
+    """Verify that configs in an important group are not important."""
+    msg = ('Child config %s for %s should not be important because %s is '
+           'already important')
+    for build_name, config in self.all_configs.iteritems():
+      if build_name.endswith('-release-group') and config['important']:
+        for child_config in config.child_configs:
+          self.assertFalse(child_config.important,
+                           msg % (child_config.name, build_name, build_name))
+
+  def testFullCQBuilderDoNotRunHWTest(self):
+    """Full CQ configs should not run HWTest."""
+    msg = ('%s should not be a full builder and run HWTest for '
+           'performance reasons')
+    for build_name, config in self.all_configs.iteritems():
+      if config.build_type == constants.PALADIN_TYPE:
+        self.assertFalse(config.chrome_binhost_only and config.hw_tests,
+                         msg % build_name)
+
+  def testExternalConfigsDoNotUseInternalFeatures(self):
+    """External configs should not use chrome_internal, or official.xml."""
+    msg = ('%s is not internal, so should not use chrome_internal, or an '
+           'internal manifest')
+    for build_name, config in self.all_configs.iteritems():
+      if not config['internal']:
+        self.assertFalse('chrome_internal' in config['useflags'],
+                         msg % build_name)
+        self.assertNotEqual(config.get('manifest'),
+                            constants.OFFICIAL_MANIFEST,
+                            msg % build_name)
+
+  def testNoShadowedUseflags(self):
+    """Configs should not have both useflags x and -x."""
+    msg = ('%s contains useflag %s and -%s.')
+    for build_name, config in self.all_configs.iteritems():
+      useflag_set = set(config['useflags'])
+      for flag in useflag_set:
+        if not flag.startswith('-'):
+          self.assertFalse('-' + flag in useflag_set,
+                           msg % (build_name, flag, flag))
+
+  def testHealthCheckEmails(self):
+    """Configs should only have valid email addresses or aliases."""
+    msg = ('%s contains an invalid tree alias or email address: %s')
+    for build_name, config in self.all_configs.iteritems():
+      health_alert_recipients = config['health_alert_recipients']
+      for recipient in health_alert_recipients:
+        self.assertTrue(re.match(r'[^@]+@[^@]+\.[^@]+', recipient) or
+                        recipient in constants.SHERIFF_TYPE_TO_URL.keys(),
+                        msg % (build_name, recipient))
+
+  def testCheckBuilderClass(self):
+    """Verify builder_class_name is a valid value."""
+    for build_name, config in self.all_configs.iteritems():
+      builder_class_name = config['builder_class_name']
+      if builder_class_name is None:
+        continue
+
+      cls = builders.GetBuilderClass(builder_class_name)
+      self.assertTrue(issubclass(cls, generic_builders.Builder),
+                      msg=('config %s has a broken builder_class_name' %
+                           build_name))
+
+  def testDistinctBoardSets(self):
+    """Verify that distinct board sets are distinct."""
+    # Every board should be in exactly one of the distinct board sets.
+    for board in chromeos_config._all_boards:
+      found = False
+      for s in chromeos_config._distinct_board_sets:
+        if board in s:
+          if found:
+            assert False, '%s in multiple board sets.' % board
+          else:
+            found = True
+      if not found:
+        assert False, '%s in no board sets' % board
+    for s in chromeos_config._distinct_board_sets:
+      for board in s - chromeos_config._all_boards:
+        assert False, ('%s in _distinct_board_sets but not in _all_boards' %
+                       board)
+
+
+class OverrideForTrybotTest(GenerateChromeosConfigTestBase):
+  """Test config override functionality."""
+
+  def testVmTestOverride(self):
+    """Verify that vm_tests override for trybots pay heed to original config."""
+    mock_options = mock.Mock()
+    old = self.all_configs['x86-mario-paladin']
+    new = config_lib.OverrideConfigForTrybot(old, mock_options)
+    self.assertEquals(new['vm_tests'], [constants.SMOKE_SUITE_TEST_TYPE,
+                                        constants.SIMPLE_AU_TEST_TYPE,
+                                        constants.CROS_VM_TEST_TYPE])
+
+    # Don't override vm tests for arm boards.
+    old = self.all_configs['daisy-paladin']
+    new = config_lib.OverrideConfigForTrybot(old, mock_options)
+    self.assertEquals(new['vm_tests'], old['vm_tests'])
+
+    # Don't override vm tests for brillo boards.
+    old = self.all_configs['storm-paladin']
+    new = config_lib.OverrideConfigForTrybot(old, mock_options)
+    self.assertEquals(new['vm_tests'], old['vm_tests'])
+
+  def testWaterfallManualConfigIsValid(self):
+    """Verify the correctness of the manual waterfall configuration."""
+    all_build_names = set(self.all_configs.iterkeys())
+    redundant = set()
+    seen = set()
+    waterfall_iter = chromeos_config._waterfall_config_map.iteritems()
+    for waterfall, names in waterfall_iter:
+      for build_name in names:
+        # Every build in the configuration map must be valid.
+        self.assertTrue(build_name in all_build_names,
+                        "Invalid build name in manual waterfall config: %s" % (
+                            build_name,))
+        # No build should appear in multiple waterfalls.
+        self.assertFalse(build_name in seen,
+                         "Duplicate manual config for board: %s" % (
+                             build_name,))
+        seen.add(build_name)
+
+        # The manual configuration must be applied and override any default
+        # configuration.
+        config = self.all_configs[build_name]
+        self.assertEqual(config['active_waterfall'], waterfall,
+                         "Manual waterfall membership is not in the "
+                         "configuration for: %s" % (build_name,))
+
+
+        default_waterfall = chromeos_config.GetDefaultWaterfall(config)
+        if config['active_waterfall'] == default_waterfall:
+          redundant.add(build_name)
+
+    # No configurations should be redundant with defaults.
+    self.assertFalse(redundant,
+                     "Manual waterfall membership in "
+                     "`_waterfall_config_map` is redundant for these "
+                     "configs: %s" % (sorted(redundant),))
+
+  def testNoDuplicateCanaryBuildersOnWaterfall(self):
+    seen = {}
+    for config in self.all_configs.itervalues():
+      waterfall = config['active_waterfall']
+      btype = config['build_type']
+      if not (waterfall and config_lib.IsCanaryType(btype)):
+        continue
+
+      waterfall_seen = seen.setdefault(waterfall, set())
+      stack = [config]
+      while stack:
+        current_config = stack.pop()
+        self.assertNotIn(current_config['name'], waterfall_seen,
+                         "Multiple builders for '%s' on '%s' waterfall" % (
+                             current_config['name'], waterfall))
+        waterfall_seen.add(current_config['name'])
+        stack += current_config['child_configs']
+
+  def testBinhostTest(self):
+    """Builders with the binhost_test setting shouldn't have boards."""
+    for config in self.all_configs.values():
+      if config.binhost_test:
+        self.assertEqual(config.boards, [])
+
+
+class TemplateTest(GenerateChromeosConfigTestBase):
+  """Tests for templates."""
+
+  def testTemplatesUsed(self):
+    """Test that all templates are used."""
+    templates_used = set(cfg['_template'] for cfg in self.all_configs.values())
+    templates = set([None] + self.all_configs.GetTemplates().keys())
+    self.assertEqual(templates, templates_used)
+
+  def testConfigNamesMatchTemplate(self):
+    """Test that all configs have names that match their templates."""
+    for name, config in self.all_configs.iteritems():
+      template = config._template
+      if template:
+        child_configs = config.child_configs
+        if not child_configs:
+          msg = '%s should end with %s to match its template'
+          self.assertTrue(name.endswith(template), msg % (name, template))
+        else:
+          msg = 'Child config of %s has name that does not match its template'
+          self.assertTrue(child_configs[0].name.endswith(template),
+                          msg % name)
+
+      for other in self.all_configs.GetTemplates():
+        if name.endswith(other) and other != template:
+          if template:
+            msg = '%s has more specific template: %s' % (name, other)
+            self.assertGreater(len(template), len(other), msg)
+          else:
+            msg = '%s should have %s as template' % (name, other)
+            self.assertFalse(name, msg)
+
+
+class SiteInterfaceTest(GenerateChromeosConfigTestBase):
+  """Test enforcing site parameters for a chromeos SiteConfig."""
+
+  def testAssertSiteParameters(self):
+    """Test that a chromeos SiteConfig contains the necessary parameters."""
+    # Check that our config contains site-independent parameters.
+    self.assertTrue(
+        config_lib_unittest.AssertSiteIndependentParameters(self.all_configs))
+
+    # Enumerate the necessary chromeos site parameter keys.
+    chromeos_params = config_lib.DefaultSiteParameters().keys()
+
+    # Check that our config contains all chromeos specific site parameters.
+    site_params = self.all_configs.params
+    self.assertTrue(all([x in site_params for x in chromeos_params]))
diff --git a/cbuildbot/chromeos_version_test.sh b/cbuildbot/chromeos_version_test.sh
new file mode 100755
index 0000000..ebd6801
--- /dev/null
+++ b/cbuildbot/chromeos_version_test.sh
@@ -0,0 +1,55 @@
+#!/bin/sh
+
+# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# ChromeOS version information
+#
+# This file is usually sourced by other build scripts, but can be run
+# directly to see what it would do.
+#
+# Version numbering scheme is much like Chrome's, with the addition of
+# double-incrementing branch number so trunk is always odd.
+
+#############################################################################
+# SET VERSION NUMBERS
+#############################################################################
+# Major/minor versions.
+# Primarily for product marketing.
+export CHROMEOS_VERSION_MAJOR=0
+export CHROMEOS_VERSION_MINOR=13
+
+# Branch number.
+# Increment by 1 in a new release branch.
+# Increment by 2 in trunk after making a release branch.
+# Does not reset on a major/minor change (always increases).
+# (Trunk is always odd; branches are always even).
+export CHROMEOS_VERSION_BRANCH=507
+
+# Patch number.
+# Increment by 1 each release on a branch.
+# Reset to 0 when increasing branch number.
+export CHROMEOS_VERSION_PATCH=87
+
+# Official builds must set CHROMEOS_OFFICIAL=1.
+if [ ${CHROMEOS_OFFICIAL:-0} -ne 1 ] && [ "${USER}" != "chrome-bot" ]; then
+  # For developer builds, overwrite CHROMEOS_VERSION_PATCH with a date string
+  # for use by auto-updater.
+  export CHROMEOS_VERSION_PATCH=$(date +%Y_%m_%d_%H%M)
+fi
+
+# Version string. Not indented, to appease bash.
+export CHROMEOS_VERSION_STRING=\
+"${CHROMEOS_VERSION_MAJOR}.${CHROMEOS_VERSION_MINOR}"\
+".${CHROMEOS_VERSION_BRANCH}.${CHROMEOS_VERSION_PATCH}"
+
+# Set CHROME values (Used for releases) to pass to chromeos-chrome-bin ebuild
+# URL to chrome archive
+export CHROME_BASE=
+# export CHROME_VERSION from incoming value or NULL and let ebuild default
+export CHROME_VERSION="$CHROME_VERSION"
+
+# Print (and remember) version info.
+echo "ChromeOS version information:"
+env | egrep '^CHROMEOS_VERSION|CHROME_' | sed 's/^/    /'
diff --git a/cbuildbot/chroot_lib.py b/cbuildbot/chroot_lib.py
new file mode 100644
index 0000000..970fc2f
--- /dev/null
+++ b/cbuildbot/chroot_lib.py
@@ -0,0 +1,95 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Functions for managing chroots.
+
+Currently this just contains functions for reusing chroots for incremental
+building.
+"""
+
+from __future__ import print_function
+import os
+
+from chromite.cbuildbot import constants
+from chromite.lib import cros_logging as logging
+from chromite.lib import osutils
+from chromite.lib import sudo
+
+
+CHROOT_VERSION_FILE = 'etc/cros_manifest_version'
+
+
+class ChrootManager(object):
+  """Class for managing chroots and chroot versions."""
+
+  def __init__(self, build_root):
+    """Constructor.
+
+    Args:
+      build_root: The root of the checkout.
+    """
+    self._build_root = build_root
+
+  def _ChrootVersionPath(self, chroot=None):
+    """Get the path to the chroot version file for |chroot|.
+
+    Args:
+      chroot: Path to chroot. Defaults to 'chroot' under build root.
+
+    Returns:
+      The path to the chroot version file.
+    """
+    if chroot is None:
+      chroot = os.path.join(self._build_root, constants.DEFAULT_CHROOT_DIR)
+    return os.path.join(chroot, CHROOT_VERSION_FILE)
+
+  def GetChrootVersion(self, chroot=None):
+    """Get the version of the checkout used to create |chroot|.
+
+    Args:
+      chroot: Path to chroot. Defaults to 'chroot' under build root.
+
+    Returns:
+      The version of Chrome OS used to build |chroot|. E.g. 6394.0.0-rc3.
+      If the chroot does not exist, or there is no version file, returns None.
+    """
+    chroot_version_file = self._ChrootVersionPath(chroot)
+    if not os.path.exists(chroot_version_file):
+      return None
+    return osutils.ReadFile(chroot_version_file).strip()
+
+  def EnsureChrootAtVersion(self, version):
+    """Ensure the current chroot is at version |version|.
+
+    If our chroot has version, use it. Otherwise, blow away the chroot.
+
+    Args:
+      version: Version of the chroot to look for. E.g. 6394.0.0-rc3
+    """
+    chroot = os.path.join(self._build_root, constants.DEFAULT_CHROOT_DIR)
+    if version and self.GetChrootVersion(chroot) == version:
+      logging.PrintBuildbotStepText('(Using existing chroot)')
+    else:
+      logging.PrintBuildbotStepText('(Using fresh chroot)')
+      osutils.RmDir(chroot, ignore_missing=True, sudo=True)
+
+  def ClearChrootVersion(self, chroot=None):
+    """Clear the version in the specified |chroot|.
+
+    Args:
+      chroot: Path to chroot. Defaults to 'chroot' under build root.
+    """
+    chroot_version_file = self._ChrootVersionPath(chroot)
+    osutils.RmDir(chroot_version_file, ignore_missing=True, sudo=True)
+
+  def SetChrootVersion(self, version, chroot=None):
+    """Update the version file in the chroot to |version|.
+
+    Args:
+      version: Version to use. E.g. 6394.0.0-rc3
+      chroot: Path to chroot. Defaults to 'chroot' under build root.
+    """
+    chroot_version_file = self._ChrootVersionPath(chroot)
+    if os.path.exists(os.path.dirname(chroot_version_file)):
+      sudo.SetFileContents(chroot_version_file, version)
diff --git a/cbuildbot/chroot_lib_unittest b/cbuildbot/chroot_lib_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/cbuildbot/chroot_lib_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/cbuildbot/chroot_lib_unittest.py b/cbuildbot/chroot_lib_unittest.py
new file mode 100644
index 0000000..f40476f
--- /dev/null
+++ b/cbuildbot/chroot_lib_unittest.py
@@ -0,0 +1,59 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for chroot management functions."""
+
+from __future__ import print_function
+
+import os
+
+from chromite.cbuildbot import chroot_lib
+from chromite.lib import cros_test_lib
+from chromite.lib import osutils
+
+
+class TestChrootManager(cros_test_lib.TempDirTestCase):
+  """Class that tests the ChrootManager."""
+
+  sudo_cleanup = True
+
+  def setUp(self):
+    self.chroot_manager = chroot_lib.ChrootManager(self.tempdir)
+
+  def testGetChrootVersionWithNoChroot(self):
+    """If there's no chroot, GetChrootVersion returns None."""
+    self.assertIsNone(self.chroot_manager.GetChrootVersion('foo'))
+
+  def testSetChrootVersionWithNoChroot(self):
+    """If there's no chroot, SetChrootVersion does nothing."""
+    self.chroot_manager.SetChrootVersion('foo')
+    self.assertIsNone(self.chroot_manager.GetChrootVersion())
+
+  def testSetChrootVersionWithChroot(self):
+    """SetChrootVersion sets the chroot version."""
+    osutils.SafeMakedirs(os.path.join(self.tempdir, 'chroot', 'etc'))
+    self.chroot_manager.SetChrootVersion('foo')
+    self.assertEquals('foo', self.chroot_manager.GetChrootVersion())
+
+  def testClearChrootVersion(self):
+    """ClearChrootVersion clears the chroot version."""
+    osutils.SafeMakedirs(os.path.join(self.tempdir, 'chroot', 'etc'))
+    self.chroot_manager.SetChrootVersion('foo')
+    self.assertEquals('foo', self.chroot_manager.GetChrootVersion())
+    self.chroot_manager.ClearChrootVersion()
+    self.assertIsNone(self.chroot_manager.GetChrootVersion())
+
+  def testUseExistingChroot(self):
+    """Tests that EnsureChrootAtVersion succeeds with valid chroot."""
+    chroot = os.path.join(self.tempdir, 'chroot')
+    osutils.SafeMakedirs(os.path.join(chroot, 'etc'))
+    self.chroot_manager.SetChrootVersion('foo')
+    self.chroot_manager.EnsureChrootAtVersion('foo')
+    self.assertEquals(self.chroot_manager.GetChrootVersion(chroot), 'foo')
+
+  def testUseFreshChroot(self):
+    """Tests that EnsureChrootAtVersion succeeds with invalid chroot."""
+    chroot = os.path.join(self.tempdir, 'chroot')
+    self.chroot_manager.EnsureChrootAtVersion('foo')
+    self.assertEquals(self.chroot_manager.GetChrootVersion(chroot), None)
diff --git a/cbuildbot/commands.py b/cbuildbot/commands.py
new file mode 100644
index 0000000..84e6d96
--- /dev/null
+++ b/cbuildbot/commands.py
@@ -0,0 +1,2374 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing the various individual commands a builder can run."""
+
+from __future__ import print_function
+
+import base64
+import collections
+import fnmatch
+import glob
+import json
+import multiprocessing
+import os
+import re
+import shutil
+import sys
+import tempfile
+
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import constants
+from chromite.cbuildbot import failures_lib
+from chromite.cbuildbot import swarming_lib
+from chromite.cbuildbot import topology
+from chromite.cli.cros.tests import cros_vm_test
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import git
+from chromite.lib import gob_util
+from chromite.lib import gs
+from chromite.lib import locking
+from chromite.lib import osutils
+from chromite.lib import parallel
+from chromite.lib import path_util
+from chromite.lib import portage_util
+from chromite.lib import retry_util
+from chromite.lib import timeout_util
+from chromite.scripts import pushimage
+
+site_config = config_lib.GetConfig()
+
+
+_PACKAGE_FILE = '%(buildroot)s/src/scripts/cbuildbot_package.list'
+CHROME_KEYWORDS_FILE = ('/build/%(board)s/etc/portage/package.keywords/chrome')
+CHROME_UNMASK_FILE = ('/build/%(board)s/etc/portage/package.unmask/chrome')
+_CROS_ARCHIVE_URL = 'CROS_ARCHIVE_URL'
+_FACTORY_SHIM = 'factory_shim'
+_AUTOTEST_RPC_CLIENT = ('/b/build_internal/scripts/slave-internal/autotest_rpc/'
+                        'autotest_rpc_client.py')
+_AUTOTEST_RPC_HOSTNAME = 'master2'
+_LOCAL_BUILD_FLAGS = ['--nousepkg', '--reuse_pkgs_from_local_boards']
+UPLOADED_LIST_FILENAME = 'UPLOADED'
+STATEFUL_FILE = 'stateful.tgz'
+# For sorting through VM test results.
+_TEST_REPORT_FILENAME = 'test_report.log'
+_TEST_PASSED = 'PASSED'
+_TEST_FAILED = 'FAILED'
+# For swarming proxy
+_SWARMING_ADDITIONAL_TIMEOUT = 60 * 60
+_DEFAULT_HWTEST_TIMEOUT_MINS = 1440
+_SWARMING_EXPIRATION = 20 * 60
+_RUN_SUITE_PATH = '/usr/local/autotest/site_utils/run_suite.py'
+_ABORT_SUITE_PATH = '/usr/local/autotest/site_utils/abort_suite.py'
+
+
+# =========================== Command Helpers =================================
+
def RunBuildScript(buildroot, cmd, chromite_cmd=False, **kwargs):
  """Run a build script, wrapping exceptions as needed.

  This wraps RunCommand(cmd, cwd=buildroot, **kwargs), adding extra logic to
  help determine the cause of command failures.
    - If a package fails to build, a PackageBuildFailure exception is thrown,
      which lists exactly which packages failed to build.
    - If the command fails for a different reason, a BuildScriptFailure
      exception is thrown.

  We detect what packages failed to build by creating a temporary status file,
  and passing that status file to parallel_emerge via the
  PARALLEL_EMERGE_STATUS_FILE variable.

  Args:
    buildroot: The root of the build directory.
    cmd: The command to run.
    chromite_cmd: Whether the command should be evaluated relative to the
      chromite/bin subdir of the |buildroot|.
    kwargs: Optional args passed to RunCommand; see RunCommand for specifics.
      In addition, if 'sudo' kwarg is True, SudoRunCommand will be used.

  Returns:
    The result object returned by RunCommand/SudoRunCommand on success.

  Raises:
    failures_lib.PackageBuildFailure: if the chroot status file records
      specific packages that failed to build.
    failures_lib.BuildScriptFailure: for any other command failure.
  """
  assert not kwargs.get('shell', False), 'Cannot execute shell commands'
  kwargs.setdefault('cwd', buildroot)
  enter_chroot = kwargs.get('enter_chroot', False)
  sudo = kwargs.pop('sudo', False)

  if chromite_cmd:
    # Copy before mutating so the caller's list is left untouched.
    cmd = cmd[:]
    cmd[0] = os.path.join(buildroot, constants.CHROMITE_BIN_SUBDIR, cmd[0])
    if enter_chroot:
      cmd[0] = path_util.ToChrootPath(cmd[0])

  # If we are entering the chroot, create status file for tracking what
  # packages failed to build.
  chroot_tmp = os.path.join(buildroot, 'chroot', 'tmp')
  status_file = None
  with cros_build_lib.ContextManagerStack() as stack:
    if enter_chroot and os.path.exists(chroot_tmp):
      # Copy extra_env so the caller's dict isn't mutated below.
      kwargs['extra_env'] = (kwargs.get('extra_env') or {}).copy()
      # The temp file must live inside the chroot so parallel_emerge can
      # write to it; the stack closes (and deletes) it on exit.
      status_file = stack.Add(tempfile.NamedTemporaryFile, dir=chroot_tmp)
      kwargs['extra_env'][constants.PARALLEL_EMERGE_STATUS_FILE_ENVVAR] = \
          path_util.ToChrootPath(status_file.name)
    runcmd = cros_build_lib.RunCommand
    if sudo:
      runcmd = cros_build_lib.SudoRunCommand
    try:
      return runcmd(cmd, **kwargs)
    except cros_build_lib.RunCommandError as ex:
      # Print the original exception.
      logging.error('\n%s', ex)

      # Check whether a specific package failed. If so, wrap the exception
      # appropriately. These failures are usually caused by a recent CL, so we
      # don't ever treat these failures as flaky.
      if status_file is not None:
        status_file.seek(0)
        failed_packages = status_file.read().split()
        if failed_packages:
          raise failures_lib.PackageBuildFailure(ex, cmd[0], failed_packages)

      # Looks like a generic failure. Raise a BuildScriptFailure.
      raise failures_lib.BuildScriptFailure(ex, cmd[0])
+
+
def ValidateClobber(buildroot):
  """Do due diligence if user wants to clobber buildroot.

  Args:
    buildroot: buildroot that's potentially clobbered.

  Returns:
    True if the clobber is ok.
  """
  # Refuse to delete the tree this very script lives in, or the whole system.
  here = os.path.dirname(os.path.realpath(__file__))
  if here.startswith(buildroot):
    cros_build_lib.Die('You are trying to clobber this chromite checkout!')
  if buildroot == '/':
    cros_build_lib.Die('Refusing to clobber your system!')

  # Nothing to clobber; no confirmation needed.
  if not os.path.exists(buildroot):
    return True
  return cros_build_lib.BooleanPrompt(default=False)
+
+
+# =========================== Main Commands ===================================
+
+
def BuildRootGitCleanup(buildroot):
  """Put buildroot onto manifest branch. Delete branches created on last run.

  Args:
    buildroot: buildroot to clean up.
  """
  lock_path = os.path.join(buildroot, '.clean_lock')
  # Set when a shared git object directory is deleted; triggers a second
  # cleanup pass for other checkouts backed by that same object store.
  deleted_objdirs = multiprocessing.Event()

  def RunCleanupCommands(project, cwd):
    # Hold a read lock while cleaning; it is upgraded to a write lock only
    # when a checkout has to be deleted.
    with locking.FileLock(lock_path, verbose=False).read_lock() as lock:
      # Calculate where the git repository is stored.
      relpath = os.path.relpath(cwd, buildroot)
      projects_dir = os.path.join(buildroot, '.repo', 'projects')
      project_objects_dir = os.path.join(buildroot, '.repo', 'project-objects')
      repo_git_store = '%s.git' % os.path.join(projects_dir, relpath)
      repo_obj_store = '%s.git' % os.path.join(project_objects_dir, project)

      try:
        if os.path.isdir(cwd):
          git.CleanAndDetachHead(cwd)
          git.GarbageCollection(cwd)
      except cros_build_lib.RunCommandError as e:
        result = e.result
        logging.PrintBuildbotStepWarnings()
        logging.warning('\n%s', result.error)

        # The checkout is damaged: always delete the working tree (it will be
        # re-synced); delete the backing git stores too only if they are
        # corrupted.
        corrupted = git.IsGitRepositoryCorrupted(cwd)
        lock.write_lock()
        logging.warning('Deleting %s because %s failed', cwd, result.cmd)
        osutils.RmDir(cwd, ignore_missing=True)
        if corrupted:
          # Looks like the object dir is corrupted. Delete the whole repository.
          deleted_objdirs.set()
          for store in (repo_git_store, repo_obj_store):
            logging.warning('Deleting %s as well', store)
            osutils.RmDir(store, ignore_missing=True)

      # Delete all branches created by cbuildbot.
      if os.path.isdir(repo_git_store):
        cmd = ['branch', '-D'] + list(constants.CREATED_BRANCHES)
        git.RunGit(repo_git_store, cmd, error_code_ok=True)

      if os.path.isdir(cwd):
        # Above we deleted refs/heads/<branch> for each created branch, now we
        # need to delete the bare ref <branch> if it was created somehow.
        for ref in constants.CREATED_BRANCHES:
          git.RunGit(cwd, ['update-ref', '-d', ref])


  # Cleanup all of the directories.
  dirs = [[attrs['name'], os.path.join(buildroot, attrs['path'])] for attrs in
          git.ManifestCheckout.Cached(buildroot).ListCheckouts()]
  parallel.RunTasksInProcessPool(RunCleanupCommands, dirs)

  # repo shares git object directories amongst multiple project paths. If the
  # first pass deleted an object dir for a project path, then other repositories
  # (project paths) of that same project may now be broken. Do a second pass to
  # clean them up as well.
  if deleted_objdirs.is_set():
    parallel.RunTasksInProcessPool(RunCleanupCommands, dirs)
+
+
def CleanUpMountPoints(buildroot):
  """Cleans up any stale mount points from previous runs."""
  # /proc/mounts lists mounts in the order they were made; unmounting in the
  # reverse of that order handles nested loop mounts (e.g. a loop mount of a
  # file that itself lives inside another mount), which a reverse lexical
  # sort of paths cannot handle.
  root = os.path.realpath(buildroot).rstrip('/') + '/'
  stale_mounts = [mtab.destination for mtab in osutils.IterateMountPoints()
                  if mtab.destination.startswith(root)]
  for mount_point in reversed(stale_mounts):
    osutils.UmountDir(mount_point, lazy=True, cleanup=False)
+
+
def WipeOldOutput(buildroot):
  """Wipes out build output directory.

  Args:
    buildroot: Root directory where build occurs.
  """
  # Removes src/build/images for all boards (requires sudo since images are
  # created as root).
  image_dir = os.path.join(buildroot, 'src', 'build', 'images')
  osutils.RmDir(image_dir, ignore_missing=True, sudo=True)
+
+
def MakeChroot(buildroot, replace, use_sdk, chrome_root=None, extra_env=None):
  """Wrapper around make_chroot."""
  # --create downloads a prebuilt SDK; --bootstrap builds one from source.
  cmd = ['cros_sdk', '--buildbot-log-version',
         '--create' if use_sdk else '--bootstrap']
  if replace:
    cmd += ['--replace']
  if chrome_root:
    cmd += ['--chrome_root=%s' % chrome_root]
  RunBuildScript(buildroot, cmd, chromite_cmd=True, extra_env=extra_env)
+
+
def RunChrootUpgradeHooks(buildroot, chrome_root=None, extra_env=None):
  """Run the chroot upgrade hooks in the chroot.

  Args:
    buildroot: Root directory where build occurs.
    chrome_root: The directory where chrome is stored.
    extra_env: A dictionary of environment variables to set.
  """
  chroot_args = ['--chrome_root=%s' % chrome_root] if chrome_root else []
  RunBuildScript(buildroot, ['./run_chroot_version_hooks'], enter_chroot=True,
                 chroot_args=chroot_args, extra_env=extra_env)
+
+
def RefreshPackageStatus(buildroot, boards, debug):
  """Wrapper around refresh_package_status"""
  # Validate or refresh the gdata auth token before touching the spreadsheet.
  RunBuildScript(buildroot, ['check_gdata_token'], chromite_cmd=True)

  # Update the package spreadsheet for every board except the host board.
  target_boards = [b for b in boards if b != 'amd64-host']
  cmd = ['refresh_package_status', '--board=%s' % ':'.join(target_boards)]
  if debug:
    # Upload to the test spreadsheet only when in debug mode.
    cmd.append('--test-spreadsheet')
  RunBuildScript(buildroot, cmd, chromite_cmd=True, enter_chroot=True)

  # Disabling the auto-filing of Tracker issues for now - crbug.com/334260.
  #SyncPackageStatus(buildroot, debug)
+
+
def SyncPackageStatus(buildroot, debug):
  """Wrapper around sync_package_status."""
  # sync_package_status creates Tracker issues for outdated packages; at the
  # moment it runs only for groups that have opted in.
  base_cmd = ['sync_package_status']
  if debug:
    base_cmd += ['--pretend', '--test-spreadsheet']

  for team_args in (['--team=build'],
                    ['--team=kernel', '--default-owner=arscott']):
    RunBuildScript(buildroot, base_cmd + team_args, chromite_cmd=True,
                   enter_chroot=True)
+
+
def SetSharedUserPassword(buildroot, password):
  """Wrapper around set_shared_user_password.sh"""
  if password is None:
    # No password: remove any previously stored password file.
    passwd_file = os.path.join(buildroot, 'chroot/etc/shared_user_passwd.txt')
    osutils.SafeUnlink(passwd_file, sudo=True)
  else:
    RunBuildScript(buildroot, ['./set_shared_user_password.sh', password],
                   enter_chroot=True)
+
+
def UpdateChroot(buildroot, usepkg, toolchain_boards=None, extra_env=None):
  """Wrapper around update_chroot.

  Args:
    buildroot: The buildroot of the current build.
    usepkg: Whether to use binary packages when setting up the toolchain.
    toolchain_boards: List of boards to always include.
    extra_env: A dictionary of environmental variables to set during generation.
  """
  cmd = ['./update_chroot']
  if not usepkg:
    cmd.append('--nousepkg')
  if toolchain_boards:
    cmd += ['--toolchain_boards', ','.join(toolchain_boards)]
  RunBuildScript(buildroot, cmd, extra_env=extra_env, enter_chroot=True)
+
+
def SetupBoard(buildroot, board, usepkg, chrome_binhost_only=False,
               extra_env=None, force=False, profile=None, chroot_upgrade=True):
  """Wrapper around setup_board.

  Args:
    buildroot: The buildroot of the current build.
    board: The board to set up.
    usepkg: Whether to use binary packages when setting up the board.
    chrome_binhost_only: If set, only use binary packages on the board for
      Chrome itself.
    extra_env: A dictionary of environmental variables to set during generation.
    force: Whether to remove the board prior to setting it up.
    profile: The profile to use with this board.
    chroot_upgrade: Whether to update the chroot. If the chroot is already up to
      date, you can specify chroot_upgrade=False.
  """
  cmd = ['./setup_board', '--board=%s' % board,
         '--accept_licenses=@CHROMEOS']

  # emerge's dependency calculation is slow, so callers that know the chroot
  # is already current can skip the upgrade step entirely.
  if not chroot_upgrade:
    cmd.append('--skip_chroot_upgrade')
  if profile:
    cmd.append('--profile=%s' % profile)
  if not usepkg:
    cmd += _LOCAL_BUILD_FLAGS
  if chrome_binhost_only:
    cmd.append('--chrome_binhost_only')
  if force:
    cmd.append('--force')

  RunBuildScript(buildroot, cmd, extra_env=extra_env, enter_chroot=True)
+
+
class MissingBinpkg(failures_lib.InfrastructureFailure):
  """Error class for when we are missing an essential binpkg.

  Raised by VerifyBinpkg when a package that should come from prebuilts
  would instead be built from source.
  """
+
+
def VerifyBinpkg(buildroot, board, pkg, packages, extra_env=None):
  """Verify that an appropriate binary package exists for |pkg|.

  Using the depgraph from |packages|, check to see if |pkg| would be pulled in
  as a binary or from source.  If |pkg| isn't installed at all, then ignore it.

  Args:
    buildroot: The buildroot of the current build.
    board: The board to set up.
    pkg: The package to look for.
    packages: The list of packages that get installed on |board|.
    extra_env: A dictionary of environmental variables to set.

  Raises:
    MissingBinpkg: if |pkg| is found and would be built from source.
  """
  # Pretend-emerge the full package set and inspect how |pkg| would install.
  cmd = (['emerge-%s' % board, '-pegNvq', '--with-bdeps=y', '--color=n'] +
         list(packages))
  result = RunBuildScript(buildroot, cmd, capture_output=True,
                          enter_chroot=True, extra_env=extra_env)
  # '[ebuild ...]' means built from source; '[binary ...]' means a prebuilt.
  match = re.search(r'^\[(ebuild|binary).*%s' % re.escape(pkg),
                    result.output, re.MULTILINE)
  if match and match.group(1) == 'ebuild':
    logging.info('(output):\n%s', result.output)
    raise MissingBinpkg('Cannot find prebuilts for %s on %s' % (pkg, board))
+
+
def RunBinhostTest(buildroot, incremental=True):
  """Test prebuilts for all boards, making sure everybody gets Chrome prebuilts.

  Args:
    buildroot: The buildroot of the current build.
    incremental: If True, run the incremental compatibility test.
  """
  cmd = ['../cbuildbot/binhost_test', '--log-level=debug']
  if not incremental:
    # Non incremental tests are listed in a special test suite.
    cmd.append('NoIncremental')
  RunBuildScript(buildroot, cmd, chromite_cmd=True, enter_chroot=True)
+
+
def RunBranchUtilTest(buildroot, version):
  """Tests that branch-util works at the given manifest version."""
  # Use a scratch buildroot so the test run cannot disturb the real one.
  with osutils.TempDir() as tempdir:
    RunBuildScript(buildroot,
                   ['cbuildbot',
                    'branch-util',
                    '--local',
                    '--skip-remote-push',
                    '--branch-name', 'test_branch',
                    '--version', version,
                    '--buildroot', tempdir,
                    '--no-buildbot-tags'],
                   chromite_cmd=True)
+
+
def UpdateBinhostJson(buildroot):
  """Run update_binhost_json to regenerate the binhost JSON file.

  (The original docstring here was copy-pasted from RunBinhostTest.)

  Args:
    buildroot: The buildroot of the current build.
  """
  cmd = ['../cbuildbot/update_binhost_json']
  RunBuildScript(buildroot, cmd, chromite_cmd=True, enter_chroot=True)
+
+
def Build(buildroot, board, build_autotest, usepkg, chrome_binhost_only,
          packages=(), skip_chroot_upgrade=True, noworkon=False,
          extra_env=None, chrome_root=None):
  """Wrapper around build_packages.

  Args:
    buildroot: The buildroot of the current build.
    board: The board to set up.
    build_autotest: Whether to build autotest-related packages.
    usepkg: Whether to use binary packages.
    chrome_binhost_only: If set, only use binary packages on the board for
      Chrome itself.
    packages: Tuple of specific packages we want to build. If empty,
      build_packages will calculate a list of packages automatically.
    skip_chroot_upgrade: Whether to skip the chroot update. If the chroot is
      not yet up to date, you should specify skip_chroot_upgrade=False.
    noworkon: If set, don't force-build workon packages.
    extra_env: A dictionary of environmental variables to set during generation.
    chrome_root: The directory where chrome is stored.
  """
  cmd = ['./build_packages', '--board=%s' % board,
         '--accept_licenses=@CHROMEOS', '--withdebugsymbols']
  if not build_autotest:
    cmd.append('--nowithautotest')
  if skip_chroot_upgrade:
    cmd.append('--skip_chroot_upgrade')
  if not usepkg:
    cmd += _LOCAL_BUILD_FLAGS
  if chrome_binhost_only:
    cmd.append('--chrome_binhost_only')
  if noworkon:
    cmd.append('--noworkon')
  cmd += list(packages)

  chroot_args = ['--chrome_root=%s' % chrome_root] if chrome_root else []
  RunBuildScript(buildroot, cmd, extra_env=extra_env, chroot_args=chroot_args,
                 enter_chroot=True)
+
+
# Main (AP/BIOS) and EC firmware version strings reported by the updater.
FirmwareVersions = collections.namedtuple('FirmwareVersions', 'main ec')
+
+
def GetFirmwareVersions(buildroot, board):
  """Extract version information from the firmware updater, if one exists.

  Args:
    buildroot: The buildroot of the current build.
    board: The board the firmware is for.

  Returns:
    A FirmwareVersions namedtuple (main, ec). Each element is either the
    string reported by the firmware updater shellball, or None if there is
    no firmware updater (or the field is absent from its output).
  """
  updater = os.path.join(buildroot, constants.DEFAULT_CHROOT_DIR,
                         cros_build_lib.GetSysroot(board).lstrip(os.path.sep),
                         'usr', 'sbin', 'chromeos-firmwareupdate')
  if not os.path.isfile(updater):
    return FirmwareVersions(None, None)
  updater = path_util.ToChrootPath(updater)

  result = cros_build_lib.RunCommand([updater, '-V'], enter_chroot=True,
                                     capture_output=True, log_output=True,
                                     cwd=buildroot)
  main = re.search(r'BIOS version:\s*(?P<version>.*)', result.output)
  ec = re.search(r'EC version:\s*(?P<version>.*)', result.output)
  # Fix: previously this path returned a plain tuple while the no-updater
  # path returned a FirmwareVersions; return the namedtuple consistently.
  return FirmwareVersions(main.group('version') if main else None,
                          ec.group('version') if ec else None)
+
+
def BuildImage(buildroot, board, images_to_build, version=None,
               rootfs_verification=True, extra_env=None, disk_layout=None):
  """Wrapper around build_image.

  Args:
    buildroot: The buildroot of the current build.
    board: The board the image is built for.
    images_to_build: List of image types to build; treated as ['base'] when
      empty.
    version: Optional version string passed to build_image.
    rootfs_verification: If False, pass --noenable_rootfs_verification.
    extra_env: A dictionary of environmental variables to set during build.
    disk_layout: Optional disk layout name passed to build_image.
  """

  # Default to base if images_to_build is passed empty.
  if not images_to_build:
    images_to_build = ['base']

  version_str = '--version=%s' % (version or '')

  cmd = ['./build_image', '--board=%s' % board, '--replace', version_str]

  if not rootfs_verification:
    cmd += ['--noenable_rootfs_verification']

  if disk_layout:
    cmd += ['--disk_layout=%s' % disk_layout]

  cmd += images_to_build

  RunBuildScript(buildroot, cmd, extra_env=extra_env, enter_chroot=True)
+
+
def GenerateAuZip(buildroot, image_dir, extra_env=None):
  """Run the script which generates au-generator.zip.

  Args:
    buildroot: The buildroot of the current build.
    image_dir: The directory in which to store au-generator.zip.
    extra_env: A dictionary of environmental variables to set during generation.

  Raises:
    failures_lib.BuildScriptFailure if the called script fails.
  """
  cmd = ['./build_library/generate_au_zip.py',
         '-o', path_util.ToChrootPath(image_dir)]
  RunBuildScript(buildroot, cmd, extra_env=extra_env, enter_chroot=True)
+
+
def TestAuZip(buildroot, image_dir, extra_env=None):
  """Run the script which validates an au-generator.zip.

  Args:
    buildroot: The buildroot of the current build.
    image_dir: The directory in which to find au-generator.zip.
    extra_env: A dictionary of environmental variables to set during generation.

  Raises:
    failures_lib.BuildScriptFailure if the test script fails.
  """
  RunBuildScript(buildroot,
                 ['./build_library/test_au_zip.py', '-o', image_dir],
                 cwd=constants.CROSUTILS_DIR, extra_env=extra_env)
+
+
def BuildVMImageForTesting(buildroot, board, extra_env=None):
  """Run image_to_vm.sh with --test_image to produce a VM test image."""
  cmd = ['./image_to_vm.sh', '--board=%s' % board, '--test_image']
  RunBuildScript(buildroot, cmd, extra_env=extra_env, enter_chroot=True)
+
+
def RunTestImage(buildroot, board, image_dir, results_dir):
  """Executes test_image on the produced image in |image_dir|.

  The "test_image" script will be run as root in chroot. Running the script as
  root will allow the tests to read normally-forbidden files such as those
  owned by root. Running tests inside the chroot allows us to control
  dependencies better.

  Args:
    buildroot: The buildroot of the current build.
    board: The board the image was built for.
    image_dir: The directory in which to find the image.
    results_dir: The directory to store result files.

  Raises:
    failures_lib.BuildScriptFailure if the test script fails.
  """
  chroot_results_dir = path_util.ToChrootPath(results_dir)
  chroot_image_dir = path_util.ToChrootPath(image_dir)
  cmd = ['test_image',
         '--board', board,
         '--test_results_root', chroot_results_dir,
         chroot_image_dir]
  RunBuildScript(buildroot, cmd, enter_chroot=True, chromite_cmd=True,
                 sudo=True)
+
+
def RunSignerTests(buildroot, board):
  """Run security_test_image for |board| inside the chroot."""
  cmd = ['./security_test_image', '--board=%s' % board]
  RunBuildScript(buildroot, cmd, enter_chroot=True)
+
+
def RunUnitTests(buildroot, board, blacklist=None, extra_env=None):
  """Run cros_run_unit_tests for |board| inside the chroot.

  Args:
    buildroot: The buildroot of the current build.
    board: The board to run unit tests for.
    blacklist: Optional list of packages to exclude from the test run.
    extra_env: Optional dictionary of environment variables to set.
  """
  cmd = ['cros_run_unit_tests', '--board=%s' % board]
  if blacklist:
    cmd.append('--blacklist_packages=%s' % ' '.join(blacklist))
  RunBuildScript(buildroot, cmd, chromite_cmd=True, enter_chroot=True,
                 extra_env=extra_env or {})
+
+
+def RunTestSuite(buildroot, board, image_path, results_dir, test_type,
+                 whitelist_chrome_crashes, archive_dir, ssh_private_key=None):
+  """Runs the test harness suite."""
+  results_dir_in_chroot = os.path.join(buildroot, 'chroot',
+                                       results_dir.lstrip('/'))
+  osutils.RmDir(results_dir_in_chroot, ignore_missing=True)
+
+  cwd = os.path.join(buildroot, 'src', 'scripts')
+  dut_type = 'gce' if test_type == constants.GCE_VM_TEST_TYPE else 'vm'
+
+  cmd = ['bin/ctest',
+         '--board=%s' % board,
+         '--type=%s' % dut_type,
+         '--no_graphics',
+         '--target_image=%s' % image_path,
+         '--test_results_root=%s' % results_dir_in_chroot
+        ]
+
+  if test_type not in constants.VALID_VM_TEST_TYPES:
+    raise AssertionError('Unrecognized test type %r' % test_type)
+
+  if test_type == constants.FULL_AU_TEST_TYPE:
+    cmd.append('--archive_dir=%s' % archive_dir)
+  else:
+    if (test_type == constants.SMOKE_SUITE_TEST_TYPE or test_type ==
+        constants.GCE_VM_TEST_TYPE):
+      cmd.append('--only_verify')
+      cmd.append('--suite